diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml
index 4bc72aec20972..6e15d64154960 100644
--- a/.buildkite/pipelines/intake.yml
+++ b/.buildkite/pipelines/intake.yml
@@ -56,7 +56,7 @@ steps:
     timeout_in_minutes: 300
     matrix:
       setup:
-        BWC_VERSION: ["8.15.6", "8.16.2", "8.17.0", "8.18.0", "9.0.0"]
+        BWC_VERSION: ["8.16.2", "8.17.1", "8.18.0", "9.0.0"]
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml
index c58201258fbbf..9619de3c2c98b 100644
--- a/.buildkite/pipelines/periodic-packaging.yml
+++ b/.buildkite/pipelines/periodic-packaging.yml
@@ -269,8 +269,8 @@ steps:
     env:
       BWC_VERSION: 8.14.3

-  - label: "{{matrix.image}} / 8.15.6 / packaging-tests-upgrade"
-    command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.15.6
+  - label: "{{matrix.image}} / 8.15.5 / packaging-tests-upgrade"
+    command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.15.5
     timeout_in_minutes: 300
     matrix:
       setup:
@@ -283,7 +283,7 @@ steps:
       machineType: custom-16-32768
       buildDirectory: /dev/shm/bk
     env:
-      BWC_VERSION: 8.15.6
+      BWC_VERSION: 8.15.5

   - label: "{{matrix.image}} / 8.16.2 / packaging-tests-upgrade"
     command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.16.2
@@ -301,8 +301,8 @@ steps:
     env:
       BWC_VERSION: 8.16.2

-  - label: "{{matrix.image}} / 8.17.0 / packaging-tests-upgrade"
-    command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.17.0
+  - label: "{{matrix.image}} / 8.17.1 / packaging-tests-upgrade"
+    command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.17.1
     timeout_in_minutes: 300
     matrix:
       setup:
@@ -315,7 +315,7 @@ steps:
       machineType: custom-16-32768
       buildDirectory: /dev/shm/bk
     env:
-      BWC_VERSION: 8.17.0
+      BWC_VERSION: 8.17.1

   - label: "{{matrix.image}} / 8.18.0 / packaging-tests-upgrade"
     command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.18.0
diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml
index 3d6095d0b9e63..f2d169cd2b30d 100644
--- a/.buildkite/pipelines/periodic.yml
+++ b/.buildkite/pipelines/periodic.yml
@@ -287,8 +287,8 @@ steps:
         - signal_reason: agent_stop
          limit: 3

-  - label: 8.15.6 / bwc
-    command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.15.6#bwcTest
+  - label: 8.15.5 / bwc
+    command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.15.5#bwcTest
     timeout_in_minutes: 300
     agents:
       provider: gcp
@@ -297,7 +297,7 @@ steps:
      buildDirectory: /dev/shm/bk
      preemptible: true
     env:
-      BWC_VERSION: 8.15.6
+      BWC_VERSION: 8.15.5
     retry:
       automatic:
         - exit_status: "-1"
@@ -325,8 +325,8 @@ steps:
        - signal_reason: agent_stop
          limit: 3

-  - label: 8.17.0 / bwc
-    command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.17.0#bwcTest
+  - label: 8.17.1 / bwc
+    command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.17.1#bwcTest
     timeout_in_minutes: 300
     agents:
       provider: gcp
@@ -335,7 +335,7 @@ steps:
      buildDirectory: /dev/shm/bk
      preemptible: true
     env:
-      BWC_VERSION: 8.17.0
+      BWC_VERSION: 8.17.1
     retry:
       automatic:
         - exit_status: "-1"
@@ -448,7 +448,7 @@ steps:
       setup:
         ES_RUNTIME_JAVA:
           - openjdk21
-        BWC_VERSION: ["8.15.6", "8.16.2", "8.17.0", "8.18.0", "9.0.0"]
+        BWC_VERSION: ["8.16.2", "8.17.1", "8.18.0", "9.0.0"]
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
@@ -490,7 +490,7 @@ steps:
         ES_RUNTIME_JAVA:
           - openjdk21
           - openjdk23
-        BWC_VERSION: ["8.15.6", "8.16.2", "8.17.0", "8.18.0", "9.0.0"]
+        BWC_VERSION: ["8.16.2", "8.17.1", "8.18.0", "9.0.0"]
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
diff --git a/.ci/bwcVersions b/.ci/bwcVersions
index 826091807ce57..3cb983373138f 100644
--- a/.ci/bwcVersions
+++ b/.ci/bwcVersions
@@ -14,8 +14,8 @@ BWC_VERSION:
   - "8.12.2"
   - "8.13.4"
   - "8.14.3"
-  - "8.15.6"
+  - "8.15.5"
   - "8.16.2"
-  - "8.17.0"
+  - "8.17.1"
   - "8.18.0"
   - "9.0.0"
diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions
index f92881da7fea4..e05c0774c9819 100644
--- a/.ci/snapshotBwcVersions
+++ b/.ci/snapshotBwcVersions
@@ -1,6 +1,5 @@
 BWC_VERSION:
-  - "8.15.6"
   - "8.16.2"
-  - "8.17.0"
+  - "8.17.1"
   - "8.18.0"
   - "9.0.0"
diff --git a/branches.json b/branches.json
index 0e23a795664dd..95fbdb1efd655 100644
--- a/branches.json
+++ b/branches.json
@@ -13,9 +13,6 @@
   {
     "branch": "8.x"
   },
-  {
-    "branch": "8.15"
-  },
   {
     "branch": "7.17"
   }
diff --git a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/FormattingPrecommitPlugin.java b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/FormattingPrecommitPlugin.java
index 41c0b4d67e1df..ea9009172c7e2 100644
--- a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/FormattingPrecommitPlugin.java
+++ b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/FormattingPrecommitPlugin.java
@@ -17,8 +17,6 @@
 import org.gradle.api.Project;

 import java.io.File;
-import java.util.Arrays;
-import java.util.Map;

 /**
  * This plugin configures formatting for Java source using Spotless
@@ -66,8 +64,7 @@ public void apply(Project project) {
             java.importOrderFile(new File(elasticsearchWorkspace, importOrderPath));

             // Most formatting is done through the Eclipse formatter
-            java.eclipse().withP2Mirrors(Map.of("https://download.eclipse.org/", "https://mirror.umd.edu/eclipse/"))
-                .configFile(new File(elasticsearchWorkspace, formatterConfigPath));
+            java.eclipse().configFile(new File(elasticsearchWorkspace, formatterConfigPath));

             // Ensure blank lines are actually empty. Since formatters are applied in
             // order, apply this one last, otherwise non-empty blank lines can creep
diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/PublishPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/PublishPluginFuncTest.groovy
index a199ff9d3eac5..65f124e5f88e8 100644
--- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/PublishPluginFuncTest.groovy
+++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/PublishPluginFuncTest.groovy
@@ -439,7 +439,7 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
         // scm info only added for internal builds
         internalBuild()
         buildFile << """
-            buildParams.getGitOriginProperty().set("https://some-repo.com/repo.git")
+            buildParams.setGitOrigin("https://some-repo.com/repo.git")

            apply plugin:'elasticsearch.java'
            apply plugin:'elasticsearch.publish'
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java
index 0d7bcea168df8..d54eb798ce783 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java
@@ -22,7 +22,7 @@ public enum DockerBase {
     // Chainguard based wolfi image with latest jdk
     // This is usually updated via renovatebot
     // spotless:off
-    WOLFI("docker.elastic.co/wolfi/chainguard-base:latest@sha256:32f06b169bb4b0f257fbb10e8c8379f06d3ee1355c89b3327cb623781a29590e",
+    WOLFI("docker.elastic.co/wolfi/chainguard-base:latest@sha256:1b51ff6dba78c98d3e02b0cd64a8ce3238c7a40408d21e3af12a329d44db6f23",
        "-wolfi",
        "apk"
    ),
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaBasePlugin.java
index c897b142da2fb..ee0eb3f6eb2bf 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaBasePlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaBasePlugin.java
@@ -132,7 +132,7 @@ private static void disableTransitiveDependenciesForSourceSet(Project project, S
     public void configureCompile(Project project) {
         project.getExtensions().getExtraProperties().set("compactProfile", "full");
         JavaPluginExtension java = project.getExtensions().getByType(JavaPluginExtension.class);
-        if (buildParams.getJavaToolChainSpec().isPresent()) {
+        if (buildParams.getJavaToolChainSpec().getOrNull() != null) {
             java.toolchain(buildParams.getJavaToolChainSpec().get());
         }
         java.setSourceCompatibility(buildParams.getMinimumRuntimeVersion());
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java
index 720d6a7c2efb6..240b55dedf7ce 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java
@@ -13,7 +13,6 @@

 import org.elasticsearch.gradle.OS;
 import org.elasticsearch.gradle.internal.conventions.util.Util;
-import org.elasticsearch.gradle.internal.info.BuildParameterExtension;
 import org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin;
 import org.elasticsearch.gradle.internal.test.ErrorReportingTestListener;
 import org.elasticsearch.gradle.internal.test.SimpleCommandLineArgumentProvider;
@@ -27,7 +26,6 @@
 import org.gradle.api.artifacts.Configuration;
 import org.gradle.api.file.FileCollection;
 import org.gradle.api.plugins.JavaPlugin;
-import org.gradle.api.provider.Property;
 import org.gradle.api.provider.ProviderFactory;
 import org.gradle.api.tasks.SourceSet;
 import org.gradle.api.tasks.SourceSetContainer;
@@ -56,8 +54,7 @@ public abstract class ElasticsearchTestBasePlugin implements Plugin<Project> {
     @Override
     public void apply(Project project) {
         project.getRootProject().getPlugins().apply(GlobalBuildInfoPlugin.class);
-        Property<BuildParameterExtension> buildParams = loadBuildParams(project);
-
+        var buildParams = loadBuildParams(project);
         project.getPluginManager().apply(GradleTestPolicySetupPlugin.class);
         // for fips mode check
         project.getRootProject().getPluginManager().apply(GlobalBuildInfoPlugin.class);
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java
index c17127f9bbfcf..da26cb66122ad 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java
@@ -66,7 +66,7 @@ public void apply(Project project) {
         project.getRootProject().getPluginManager().apply(GlobalBuildInfoPlugin.class);
         project.getPlugins().apply(JvmToolchainsPlugin.class);
         toolChainService = project.getExtensions().getByType(JavaToolchainService.class);
-        BuildParameterExtension buildParams = loadBuildParams(project).get();
+        var buildParams = loadBuildParams(project).get();
         Boolean isCi = buildParams.isCi();
         buildParams.getBwcVersions().forPreviousUnreleased((BwcVersions.UnreleasedVersionInfo unreleasedVersion) -> {
             configureBwcProject(
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java
index ec694de8ec597..ba587aa4bd979 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java
@@ -20,7 +20,6 @@
 import org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes;
 import org.elasticsearch.gradle.internal.docker.DockerSupportPlugin;
 import org.elasticsearch.gradle.internal.docker.DockerSupportService;
-import org.elasticsearch.gradle.internal.info.BuildParameterExtension;
 import org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin;
 import org.elasticsearch.gradle.util.GradleUtils;
 import org.gradle.api.GradleException;
@@ -49,7 +48,7 @@ public void apply(Project project) {
         // this is needed for isInternal
         project.getRootProject().getPluginManager().apply(GlobalBuildInfoPlugin.class);
         project.getRootProject().getPluginManager().apply(DockerSupportPlugin.class);
-        BuildParameterExtension buildParams = loadBuildParams(project).get();
+        var buildParams = loadBuildParams(project).get();

         DistributionDownloadPlugin distributionDownloadPlugin = project.getPlugins().apply(DistributionDownloadPlugin.class);
         Provider<DockerSupportService> dockerSupport = GradleUtils.getBuildService(
@@ -61,7 +60,7 @@ public void apply(Project project) {
         );
         registerInternalDistributionResolutions(
             DistributionDownloadPlugin.getRegistrationsContainer(project),
-            buildParams.getBwcVersionsProperty()
+            buildParams.getBwcVersionsProvider()
         );
     }

diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalTestClustersPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalTestClustersPlugin.java
index 7e7ffad12a9a5..c618fe6c2e1bf 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalTestClustersPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalTestClustersPlugin.java
@@ -10,7 +10,6 @@
 package org.elasticsearch.gradle.internal;

 import org.elasticsearch.gradle.VersionProperties;
-import org.elasticsearch.gradle.internal.info.BuildParameterExtension;
 import org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin;
 import org.elasticsearch.gradle.testclusters.ElasticsearchCluster;
 import org.elasticsearch.gradle.testclusters.TestClustersPlugin;
@@ -26,7 +25,7 @@ public class InternalTestClustersPlugin implements Plugin<Project> {
     public void apply(Project project) {
         project.getPlugins().apply(InternalDistributionDownloadPlugin.class);
         project.getRootProject().getRootProject().getPlugins().apply(GlobalBuildInfoPlugin.class);
-        BuildParameterExtension buildParams = loadBuildParams(project).get();
+        var buildParams = loadBuildParams(project).get();
         project.getRootProject().getPluginManager().apply(InternalReaperPlugin.class);
         TestClustersPlugin testClustersPlugin = project.getPlugins().apply(TestClustersPlugin.class);
         testClustersPlugin.setRuntimeJava(buildParams.getRuntimeJavaHome());
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BuildParameterExtension.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BuildParameterExtension.java
index 5531194e0abde..e80dc6ef1b44c 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BuildParameterExtension.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BuildParameterExtension.java
@@ -13,175 +13,58 @@
 import org.gradle.api.Action;
 import org.gradle.api.JavaVersion;
 import org.gradle.api.Task;
-import org.gradle.api.provider.Property;
 import org.gradle.api.provider.Provider;
-import org.gradle.api.provider.ProviderFactory;
 import org.gradle.jvm.toolchain.JavaToolchainSpec;

 import java.io.File;
-import java.time.ZoneOffset;
 import java.time.ZonedDateTime;
 import java.util.List;
 import java.util.Random;
-import java.util.concurrent.atomic.AtomicReference;
-
-public abstract class BuildParameterExtension {
-    private final Provider<Boolean> inFipsJvm;
-    private final Provider<File> runtimeJavaHome;
-    private final Boolean isRuntimeJavaHomeSet;
-    private final List<JavaHome> javaVersions;
-    private final JavaVersion minimumCompilerVersion;
-    private final JavaVersion minimumRuntimeVersion;
-    private final JavaVersion gradleJavaVersion;
-    private final Provider<JavaVersion> runtimeJavaVersion;
-    private final Provider<Action<JavaToolchainSpec>> javaToolChainSpec;
-    private final Provider<String> runtimeJavaDetails;
-    private final String gitRevision;
-    private transient AtomicReference<ZonedDateTime> buildDate = new AtomicReference<>();
-    private final String testSeed;
-    private final Boolean isCi;
-    private final Integer defaultParallel;
-    private final Boolean isSnapshotBuild;
-
-    public BuildParameterExtension(
-        ProviderFactory providers,
-        Provider<File> runtimeJavaHome,
-        Provider<Action<JavaToolchainSpec>> javaToolChainSpec,
-        Provider<JavaVersion> runtimeJavaVersion,
-        boolean isRuntimeJavaHomeSet,
-        Provider<String> runtimeJavaDetails,
-        List<JavaHome> javaVersions,
-        JavaVersion minimumCompilerVersion,
-        JavaVersion minimumRuntimeVersion,
-        JavaVersion gradleJavaVersion,
-        String gitRevision,
-        String gitOrigin,
-        ZonedDateTime buildDate,
-        String testSeed,
-        boolean isCi,
-        int defaultParallel,
-        final boolean isSnapshotBuild,
-        Provider<BwcVersions> bwcVersions
-    ) {
-        this.inFipsJvm = providers.systemProperty("tests.fips.enabled").map(BuildParameterExtension::parseBoolean);
-        this.runtimeJavaHome = runtimeJavaHome;
-        this.javaToolChainSpec = javaToolChainSpec;
-        this.runtimeJavaVersion = runtimeJavaVersion;
-        this.isRuntimeJavaHomeSet = isRuntimeJavaHomeSet;
-        this.runtimeJavaDetails = runtimeJavaDetails;
-        this.javaVersions = javaVersions;
-        this.minimumCompilerVersion = minimumCompilerVersion;
-        this.minimumRuntimeVersion = minimumRuntimeVersion;
-        this.gradleJavaVersion = gradleJavaVersion;
-        this.gitRevision = gitRevision;
-        this.testSeed = testSeed;
-        this.isCi = isCi;
-        this.defaultParallel = defaultParallel;
-        this.isSnapshotBuild = isSnapshotBuild;
-        this.getBwcVersionsProperty().set(bwcVersions);
-        this.getGitOriginProperty().set(gitOrigin);
-    }
-
-    private static boolean parseBoolean(String s) {
-        if (s == null) {
-            return false;
-        }
-        return Boolean.parseBoolean(s);
-    }
-
-    public boolean getInFipsJvm() {
-        return inFipsJvm.getOrElse(false);
-    }
-
-    public Provider<File> getRuntimeJavaHome() {
-        return runtimeJavaHome;
-    }
-
-    public void withFipsEnabledOnly(Task task) {
-        task.onlyIf("FIPS mode disabled", task1 -> getInFipsJvm() == false);
-    }
-
-    public Boolean getIsRuntimeJavaHomeSet() {
-        return isRuntimeJavaHomeSet;
-    }
-
-    public List<JavaHome> getJavaVersions() {
-        return javaVersions;
-    }
-
-    public JavaVersion getMinimumCompilerVersion() {
-        return minimumCompilerVersion;
-    }
-
-    public JavaVersion getMinimumRuntimeVersion() {
-        return minimumRuntimeVersion;
-    }
-
-    public JavaVersion getGradleJavaVersion() {
-        return gradleJavaVersion;
-    }
-
-    public Provider<JavaVersion> getRuntimeJavaVersion() {
-        return runtimeJavaVersion;
-    }
-
-    public Provider<Action<JavaToolchainSpec>> getJavaToolChainSpec() {
-        return javaToolChainSpec;
-    }
-
-    public Provider<String> getRuntimeJavaDetails() {
-        return runtimeJavaDetails;
-    }
-
-    public String getGitRevision() {
-        return gitRevision;
-    }
-
-    public String getGitOrigin() {
-        return getGitOriginProperty().get();
-    }
-
-    public ZonedDateTime getBuildDate() {
-        ZonedDateTime value = buildDate.get();
-        if (value == null) {
-            value = ZonedDateTime.now(ZoneOffset.UTC);
-            if (buildDate.compareAndSet(null, value) == false) {
-                // If another thread initialized it first, return the initialized value
-                value = buildDate.get();
-            }
-        }
-        return value;
-    }
-
-    public String getTestSeed() {
-        return testSeed;
-    }
-
-    public Boolean isCi() {
-        return isCi;
-    }
-
-    public Integer getDefaultParallel() {
-        return defaultParallel;
-    }
-
-    public Boolean isSnapshotBuild() {
-        return isSnapshotBuild;
-    }
-
-    public BwcVersions getBwcVersions() {
-        return getBwcVersionsProperty().get();
-    }
-
-    public abstract Property<BwcVersions> getBwcVersionsProperty();
-
-    public abstract Property<String> getGitOriginProperty();
-
-    public Random getRandom() {
-        return new Random(Long.parseUnsignedLong(testSeed.split(":")[0], 16));
-    }
-
-    public Boolean isGraalVmRuntime() {
-        return runtimeJavaDetails.get().toLowerCase().contains("graalvm");
-    }
+
+public interface BuildParameterExtension {
+    String EXTENSION_NAME = "buildParams";
+
+    boolean getInFipsJvm();
+
+    Provider<File> getRuntimeJavaHome();
+
+    void withFipsEnabledOnly(Task task);
+
+    Boolean getIsRuntimeJavaHomeSet();
+
+    List<JavaHome> getJavaVersions();
+
+    JavaVersion getMinimumCompilerVersion();
+
+    JavaVersion getMinimumRuntimeVersion();
+
+    JavaVersion getGradleJavaVersion();
+
+    Provider<JavaVersion> getRuntimeJavaVersion();
+
+    Provider<Action<JavaToolchainSpec>> getJavaToolChainSpec();
+
+    Provider<String> getRuntimeJavaDetails();
+
+    String getGitRevision();
+
+    String getGitOrigin();
+
+    ZonedDateTime getBuildDate();
+
+    String getTestSeed();
+
+    Boolean isCi();
+
+    Integer getDefaultParallel();
+
+    Boolean isSnapshotBuild();
+
+    BwcVersions getBwcVersions();
+
+    Provider<BwcVersions> getBwcVersionsProvider();
+
+    Random getRandom();
+
+    Boolean isGraalVmRuntime();
 }
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/DefaultBuildParameterExtension.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/DefaultBuildParameterExtension.java
new file mode 100644
index 0000000000000..faac406d974c6
--- /dev/null
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/DefaultBuildParameterExtension.java
@@ -0,0 +1,245 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.gradle.internal.info;
+
+import org.elasticsearch.gradle.internal.BwcVersions;
+import org.gradle.api.Action;
+import org.gradle.api.JavaVersion;
+import org.gradle.api.Task;
+import org.gradle.api.provider.Provider;
+import org.gradle.api.provider.ProviderFactory;
+import org.gradle.jvm.toolchain.JavaToolchainSpec;
+
+import java.io.File;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
+import java.util.List;
+import java.util.Random;
+import java.util.concurrent.atomic.AtomicReference;
+import java.util.function.Supplier;
+
+public abstract class DefaultBuildParameterExtension implements BuildParameterExtension {
+    private final Provider<Boolean> inFipsJvm;
+    private final Provider<File> runtimeJavaHome;
+    private final Boolean isRuntimeJavaHomeSet;
+    private final List<JavaHome> javaVersions;
+    private final JavaVersion minimumCompilerVersion;
+    private final JavaVersion minimumRuntimeVersion;
+    private final JavaVersion gradleJavaVersion;
+    private final Provider<JavaVersion> runtimeJavaVersion;
+    private final Provider<Action<JavaToolchainSpec>> javaToolChainSpec;
+    private final Provider<String> runtimeJavaDetails;
+    private final String gitRevision;
+
+    private transient AtomicReference<ZonedDateTime> buildDate = new AtomicReference<>();
+    private final String testSeed;
+    private final Boolean isCi;
+    private final Integer defaultParallel;
+    private final Boolean isSnapshotBuild;
+
+    // not final for testing
+    private Provider<BwcVersions> bwcVersions;
+    private String gitOrigin;
+
+    public DefaultBuildParameterExtension(
+        ProviderFactory providers,
+        Provider<File> runtimeJavaHome,
+        Provider<Action<JavaToolchainSpec>> javaToolChainSpec,
+        Provider<JavaVersion> runtimeJavaVersion,
+        boolean isRuntimeJavaHomeSet,
+        Provider<String> runtimeJavaDetails,
+        List<JavaHome> javaVersions,
+        JavaVersion minimumCompilerVersion,
+        JavaVersion minimumRuntimeVersion,
+        JavaVersion gradleJavaVersion,
+        String gitRevision,
+        String gitOrigin,
+        String testSeed,
+        boolean isCi,
+        int defaultParallel,
+        final boolean isSnapshotBuild,
+        Provider<BwcVersions> bwcVersions
+    ) {
+        this.inFipsJvm = providers.systemProperty("tests.fips.enabled").map(DefaultBuildParameterExtension::parseBoolean);
+        this.runtimeJavaHome = cache(providers, runtimeJavaHome);
+        this.javaToolChainSpec = cache(providers, javaToolChainSpec);
+        this.runtimeJavaVersion = cache(providers, runtimeJavaVersion);
+        this.isRuntimeJavaHomeSet = isRuntimeJavaHomeSet;
+        this.runtimeJavaDetails = cache(providers, runtimeJavaDetails);
+        this.javaVersions = javaVersions;
+        this.minimumCompilerVersion = minimumCompilerVersion;
+        this.minimumRuntimeVersion = minimumRuntimeVersion;
+        this.gradleJavaVersion = gradleJavaVersion;
+        this.gitRevision = gitRevision;
+        this.testSeed = testSeed;
+        this.isCi = isCi;
+        this.defaultParallel = defaultParallel;
+        this.isSnapshotBuild = isSnapshotBuild;
+        this.bwcVersions = cache(providers, bwcVersions);
+        this.gitOrigin = gitOrigin;
+    }
+
+    // This is a workaround for https://github.com/gradle/gradle/issues/25550
+    private <T> Provider<T> cache(ProviderFactory providerFactory, Provider<T> incomingProvider) {
+        SingleObjectCache<T> cache = new SingleObjectCache<>();
+        return providerFactory.provider(() -> cache.computeIfAbsent(() -> incomingProvider.getOrNull()));
+    }
+
+    private static boolean parseBoolean(String s) {
+        if (s == null) {
+            return false;
+        }
+        return Boolean.parseBoolean(s);
+    }
+
+    @Override
+    public boolean getInFipsJvm() {
+        return inFipsJvm.getOrElse(false);
+    }
+
+    @Override
+    public Provider<File> getRuntimeJavaHome() {
+        return runtimeJavaHome;
+    }
+
+    @Override
+    public void withFipsEnabledOnly(Task task) {
+        task.onlyIf("FIPS mode disabled", task1 -> getInFipsJvm() == false);
+    }
+
+    @Override
+    public Boolean getIsRuntimeJavaHomeSet() {
+        return isRuntimeJavaHomeSet;
+    }
+
+    @Override
+    public List<JavaHome> getJavaVersions() {
+        return javaVersions;
+    }
+
+    @Override
+    public JavaVersion getMinimumCompilerVersion() {
+        return minimumCompilerVersion;
+    }
+
+    @Override
+    public JavaVersion getMinimumRuntimeVersion() {
+        return minimumRuntimeVersion;
+    }
+
+    @Override
+    public JavaVersion getGradleJavaVersion() {
+        return gradleJavaVersion;
+    }
+
+    @Override
+    public Provider<JavaVersion> getRuntimeJavaVersion() {
+        return runtimeJavaVersion;
+    }
+
+    @Override
+    public Provider<Action<JavaToolchainSpec>> getJavaToolChainSpec() {
+        return javaToolChainSpec;
+    }
+
+    @Override
+    public Provider<String> getRuntimeJavaDetails() {
+        return runtimeJavaDetails;
+    }
+
+    @Override
+    public String getGitRevision() {
+        return gitRevision;
+    }
+
+    @Override
+    public String getGitOrigin() {
+        return gitOrigin;
+    }
+
+    @Override
+    public ZonedDateTime getBuildDate() {
+        ZonedDateTime value = buildDate.get();
+        if (value == null) {
+            value = ZonedDateTime.now(ZoneOffset.UTC);
+            if (buildDate.compareAndSet(null, value) == false) {
+                // If another thread initialized it first, return the initialized value
+                value = buildDate.get();
+            }
+        }
+        return value;
+    }
+
+    @Override
+    public String getTestSeed() {
+        return testSeed;
+    }
+
+    @Override
+    public Boolean isCi() {
+        return isCi;
+    }
+
+    @Override
+    public Integer getDefaultParallel() {
+        return defaultParallel;
+    }
+
+    @Override
+    public Boolean isSnapshotBuild() {
+        return isSnapshotBuild;
+    }
+
+    @Override
+    public BwcVersions getBwcVersions() {
+        return bwcVersions.get();
+    }
+
+    @Override
+    public Random getRandom() {
+        return new Random(Long.parseUnsignedLong(testSeed.split(":")[0], 16));
+    }
+
+    @Override
+    public Boolean isGraalVmRuntime() {
+        return runtimeJavaDetails.get().toLowerCase().contains("graalvm");
+    }
+
+    private static class SingleObjectCache<T> {
+        private T instance;
+
+        public T computeIfAbsent(Supplier<T> supplier) {
+            synchronized (this) {
+                if (instance == null) {
+                    instance = supplier.get();
+                }
+                return instance;
+            }
+        }
+
+        public T get() {
+            return instance;
+        }
+    }
+
+    public Provider<BwcVersions> getBwcVersionsProvider() {
+        return bwcVersions;
+    }
+
+    // for testing; not part of public api
+    public void setBwcVersions(Provider<BwcVersions> bwcVersions) {
+        this.bwcVersions = bwcVersions;
+    }
+
+    // for testing; not part of public api
+    public void setGitOrigin(String gitOrigin) {
+        this.gitOrigin = gitOrigin;
+    }
+}
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java
index 27d2a66feb206..86f59aa0ab41e 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java
@@ -51,8 +51,6 @@
 import java.io.InputStreamReader;
 import java.io.UncheckedIOException;
 import java.nio.file.Files;
-import java.time.ZoneOffset;
-import java.time.ZonedDateTime;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Locale;
@@ -124,8 +122,10 @@ public void apply(Project project) {
         );
         BuildParameterExtension buildParams = project.getExtensions()
             .create(
-                "buildParams",
                 BuildParameterExtension.class,
+                BuildParameterExtension.EXTENSION_NAME,
+                DefaultBuildParameterExtension.class,
+                providers,
                 actualRuntimeJavaHome,
                 resolveToolchainSpecFromEnv(),
                 actualRuntimeJavaHome.map(
@@ -145,7 +145,6 @@ public void apply(Project project) {
             Jvm.current().getJavaVersion(),
             gitInfo.getRevision(),
             gitInfo.getOrigin(),
-            ZonedDateTime.now(ZoneOffset.UTC),
             getTestSeed(),
             System.getenv("JENKINS_URL") != null || System.getenv("BUILDKITE_BUILD_URL") != null || System.getProperty("isCI") != null,
             ParallelDetector.findDefaultParallel(project),
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditPrecommitPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditPrecommitPlugin.java
index f70e25a57e331..e45a1d3dd25b1 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditPrecommitPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditPrecommitPlugin.java
@@ -12,12 +12,10 @@
 import org.elasticsearch.gradle.dependencies.CompileOnlyResolvePlugin;
 import org.elasticsearch.gradle.internal.ExportElasticsearchBuildResourcesTask;
 import org.elasticsearch.gradle.internal.conventions.precommit.PrecommitPlugin;
-import org.elasticsearch.gradle.internal.info.BuildParameterExtension;
 import org.gradle.api.Project;
 import org.gradle.api.Task;
 import org.gradle.api.artifacts.Configuration;
 import org.gradle.api.artifacts.component.ModuleComponentIdentifier;
-import org.gradle.api.provider.Property;
 import org.gradle.api.tasks.TaskProvider;

 import java.io.File;
@@ -34,7 +32,7 @@ public class ThirdPartyAuditPrecommitPlugin extends PrecommitPlugin {
     @Override
     public TaskProvider<? extends Task> createTask(Project project) {
         project.getRootProject().getPlugins().apply(CompileOnlyResolvePlugin.class);
-        Property<BuildParameterExtension> buildParams = loadBuildParams(project);
+        var buildParams = loadBuildParams(project);
         project.getPlugins().apply(CompileOnlyResolvePlugin.class);
         project.getConfigurations().create("forbiddenApisCliJar");
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/snyk/SnykDependencyMonitoringGradlePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/snyk/SnykDependencyMonitoringGradlePlugin.java
index fa10daf8dfaaa..704394b4f01a9 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/snyk/SnykDependencyMonitoringGradlePlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/snyk/SnykDependencyMonitoringGradlePlugin.java
@@ -10,7 +10,6 @@
 package org.elasticsearch.gradle.internal.snyk;

 import org.elasticsearch.gradle.internal.conventions.info.GitInfo;
-import org.elasticsearch.gradle.internal.info.BuildParameterExtension;
 import org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin;
 import org.gradle.api.Plugin;
 import org.gradle.api.Project;
@@ -18,7 +17,6 @@
 import org.gradle.api.file.ProjectLayout;
 import org.gradle.api.plugins.JavaPlugin;
 import org.gradle.api.plugins.JavaPluginExtension;
-import org.gradle.api.provider.Property;
 import org.gradle.api.provider.ProviderFactory;
 import org.gradle.api.tasks.SourceSet;

@@ -41,7 +39,7 @@ public SnykDependencyMonitoringGradlePlugin(ProjectLayout projectLayout, Provide
     @Override
     public void apply(Project project) {
         project.getRootProject().getPlugins().apply(GlobalBuildInfoPlugin.class);
-        Property<BuildParameterExtension> buildParams = loadBuildParams(project);
+        var buildParams = loadBuildParams(project);

         var generateTaskProvider = project.getTasks()
             .register("generateSnykDependencyGraph", GenerateSnykDependencyGraph.class, generateSnykDependencyGraph -> {
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/TestWithSslPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/TestWithSslPlugin.java
index 68711881b02f4..94018d1501e0b 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/TestWithSslPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/TestWithSslPlugin.java
@@ -35,7 +35,7 @@ public class TestWithSslPlugin implements Plugin<Project> {
     @Override
     public void apply(Project project) {
         File keyStoreDir = new File(project.getBuildDir(), "keystore");
-        BuildParameterExtension buildParams = project.getRootProject().getExtensions().getByType(BuildParameterExtension.class);
+        var buildParams = project.getRootProject().getExtensions().getByType(BuildParameterExtension.class);
         TaskProvider<ExportElasticsearchBuildResourcesTask> exportKeyStore = project.getTasks()
             .register("copyTestCertificates", ExportElasticsearchBuildResourcesTask.class, (t) -> {
                 t.copy("test/ssl/test-client.crt");
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/AbstractYamlRestCompatTestPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/AbstractYamlRestCompatTestPlugin.java
index ca669276123b3..b511702d1c7c3 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/AbstractYamlRestCompatTestPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/AbstractYamlRestCompatTestPlugin.java
@@ -11,7 +11,6 @@

 import org.elasticsearch.gradle.Version;
 import org.elasticsearch.gradle.internal.ElasticsearchJavaBasePlugin;
-import org.elasticsearch.gradle.internal.info.BuildParameterExtension;
 import org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin;
 import org.elasticsearch.gradle.internal.test.rest.CopyRestApiTask;
 import org.elasticsearch.gradle.internal.test.rest.CopyRestTestsTask;
@@ -78,7 +77,7 @@ public AbstractYamlRestCompatTestPlugin(ProjectLayout projectLayout, FileOperati
     @Override
     public void apply(Project project) {
         project.getRootProject().getRootProject().getPlugins().apply(GlobalBuildInfoPlugin.class);
-        BuildParameterExtension buildParams = loadBuildParams(project).get();
+        var buildParams = loadBuildParams(project).get();

         final Path compatRestResourcesDir = Path.of("restResources").resolve("compat");
         final Path compatSpecsDir = compatRestResourcesDir.resolve("yamlSpecs");
diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/info/BuildParameterExtensionSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/info/BuildParameterExtensionSpec.groovy
new file mode 100644
index 0000000000000..343268b9b4d47
--- /dev/null
+++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/info/BuildParameterExtensionSpec.groovy
@@ -0,0 +1,112 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.gradle.internal.info
+
+import spock.lang.Specification
+
+import org.elasticsearch.gradle.internal.BwcVersions
+import org.gradle.api.JavaVersion
+import org.gradle.api.Project
+import org.gradle.api.provider.Provider
+import org.gradle.api.provider.ProviderFactory
+import org.gradle.testfixtures.ProjectBuilder
+import org.junit.Assert
+
+import java.util.concurrent.CountDownLatch
+import java.util.concurrent.Executors
+import java.util.concurrent.Future
+import java.util.concurrent.TimeUnit
+import java.util.concurrent.atomic.AtomicInteger
+
+import static org.junit.Assert.fail
+
+class BuildParameterExtensionSpec extends Specification {
+
+    ProjectBuilder projectBuilder = new ProjectBuilder()
+
+    def "#getterName is cached and concurrently accessible"() {
+        given:
+        def project = projectBuilder.build()
+        def providers = project.getProviders();
+        def buildParams = extension(project, providers)
+        int numberOfThreads = 10;
+        when:
+        var service = Executors.newFixedThreadPool(numberOfThreads)
+        var latch = new CountDownLatch(numberOfThreads)
+        def testedProvider = buildParams."$getterName"()
+        def futures = (1..numberOfThreads).collect {
+            service.submit(
+                () -> {
+                    try {
+                        testedProvider.get()
+                    } catch (AssertionError e) {
+                        latch.countDown()
+                        Assert.fail("Accessing cached provider more than once")
+                    }
+                    latch.countDown()
+                }
+            )
+        }
+        latch.await(10, TimeUnit.SECONDS)
+
+        then:
+        futures.collect { it.state() }.any() { it == Future.State.FAILED } == false
+
+        where:
+        getterName << [
+            "getRuntimeJavaHome",
+            "getJavaToolChainSpec",
+            "getRuntimeJavaDetails",
+            "getRuntimeJavaVersion",
+            "getBwcVersionsProvider"
+        ]
+    }
+
+    private BuildParameterExtension extension(Project project, ProviderFactory providers) {
+        return project.getExtensions().create(
+            BuildParameterExtension.class,
"buildParameters", DefaultBuildParameterExtension.class, + providers, + providerMock(), + providerMock(), + providerMock(), + true, + providerMock(), + [ + Mock(JavaHome), + Mock(JavaHome), + ], + JavaVersion.VERSION_11, + JavaVersion.VERSION_11, + JavaVersion.VERSION_11, + "gitRevision", + "gitOrigin", + "testSeed", + false, + 5, + true, + // cannot use Mock here because of the way the provider is used by gradle internal property api + providerMock() + ) + } + + private Provider providerMock() { + Provider provider = Mock(Provider) + AtomicInteger counter = new AtomicInteger(0) + provider.getOrNull() >> { + println "accessing provider" + return counter.get() == 1 ? fail("Accessing cached provider more than once") : counter.incrementAndGet() + } + provider.get() >> { + fail("Accessing cached provider directly") + } + return provider + + } +} diff --git a/build-tools/src/testFixtures/groovy/org/elasticsearch/gradle/fixtures/AbstractGradleFuncTest.groovy b/build-tools/src/testFixtures/groovy/org/elasticsearch/gradle/fixtures/AbstractGradleFuncTest.groovy index 07214b5fbf845..fe23204d5601c 100644 --- a/build-tools/src/testFixtures/groovy/org/elasticsearch/gradle/fixtures/AbstractGradleFuncTest.groovy +++ b/build-tools/src/testFixtures/groovy/org/elasticsearch/gradle/fixtures/AbstractGradleFuncTest.groovy @@ -183,7 +183,7 @@ abstract class AbstractGradleFuncTest extends Specification { ] BwcVersions versions = new BwcVersions(currentVersion, versionList, ['main', '8.x', '8.3', '8.2', '8.1', '7.16']) - buildParams.getBwcVersionsProperty().set(versions) + buildParams.setBwcVersions(project.provider { versions} ) """ } diff --git a/build.gradle b/build.gradle index 715614c1beea4..b95e34640cb5f 100644 --- a/build.gradle +++ b/build.gradle @@ -301,7 +301,10 @@ allprojects { if (project.path.contains(":distribution:docker")) { enabled = false } - + if (project.path.contains(":libs:cli")) { + // ensure we resolve p2 dependencies for the spotless eclipse formatter + dependsOn "spotlessJavaCheck" + } } plugins.withId('lifecycle-base') { diff --git a/docs/changelog/104683.yaml b/docs/changelog/104683.yaml deleted file mode 100644 index d4f40b59cfd91..0000000000000 --- a/docs/changelog/104683.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104683 -summary: "Feature: re-structure document ID generation favoring _id inverted index compression" -area: Logs -type: enhancement -issues: [] diff --git a/docs/changelog/112881.yaml b/docs/changelog/112881.yaml deleted file mode 100644 index a8a0d542f8201..0000000000000 --- a/docs/changelog/112881.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 112881 -summary: "ESQL: Remove parent from `FieldAttribute`" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/112989.yaml b/docs/changelog/112989.yaml deleted file mode 100644 index 364f012f94420..0000000000000 --- a/docs/changelog/112989.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 112989 -summary: Upgrade Bouncy Castle FIPS dependencies -area: Security -type: upgrade -issues: [] diff --git a/docs/changelog/113194.yaml b/docs/changelog/113194.yaml deleted file mode 100644 index 132659321c65e..0000000000000 --- a/docs/changelog/113194.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 113194 -summary: Add Search Phase APM metrics -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/113713.yaml b/docs/changelog/113713.yaml deleted file mode 100644 index c5478c95e464d..0000000000000 --- a/docs/changelog/113713.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 113713 -summary: Adding inference endpoint validation for 
-area: Machine Learning
-type: enhancement
-issues: []
diff --git a/docs/changelog/113920.yaml b/docs/changelog/113920.yaml
deleted file mode 100644
index 4699ae6d7dd65..0000000000000
--- a/docs/changelog/113920.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 113920
-summary: Add initial support for `semantic_text` field type
-area: Search
-type: enhancement
-issues: []
diff --git a/docs/changelog/114334.yaml b/docs/changelog/114334.yaml
deleted file mode 100644
index d0fefe40c6970..0000000000000
--- a/docs/changelog/114334.yaml
+++ /dev/null
@@ -1,7 +0,0 @@
-pr: 114334
-summary: Don't return TEXT type for functions that take TEXT
-area: ES|QL
-type: bug
-issues:
- - 111537
- - 114333
diff --git a/docs/changelog/114482.yaml b/docs/changelog/114482.yaml
deleted file mode 100644
index a5e2e981f7adc..0000000000000
--- a/docs/changelog/114482.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 114482
-summary: Remove snapshot build restriction for match and qstr functions
-area: ES|QL
-type: feature
-issues: []
diff --git a/docs/changelog/114484.yaml b/docs/changelog/114484.yaml
deleted file mode 100644
index 48f54ad0218bb..0000000000000
--- a/docs/changelog/114484.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-pr: 114484
-summary: Add `docvalue_fields` Support for `dense_vector` Fields
-area: Search
-type: enhancement
-issues:
- - 108470
diff --git a/docs/changelog/114620.yaml b/docs/changelog/114620.yaml
deleted file mode 100644
index 92498db92061f..0000000000000
--- a/docs/changelog/114620.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 114620
-summary: "ES|QL: add metrics for functions"
-area: ES|QL
-type: enhancement
-issues: []
diff --git a/docs/changelog/114665.yaml b/docs/changelog/114665.yaml
deleted file mode 100644
index b90bb799bd896..0000000000000
--- a/docs/changelog/114665.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-pr: 114665
-summary: Fixing remote ENRICH by pushing the Enrich inside `FragmentExec`
-area: ES|QL
-type: bug
-issues:
- - 105095
diff --git a/docs/changelog/114681.yaml b/docs/changelog/114681.yaml
deleted file mode 100644
index 2a9901114e56f..0000000000000
--- a/docs/changelog/114681.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-pr: 114681
-summary: "Support for unsigned 64 bit numbers in Cpu stats"
-area: Infra/Core
-type: enhancement
-issues:
- - 112274
diff --git a/docs/changelog/114742.yaml b/docs/changelog/114742.yaml
deleted file mode 100644
index 5bd3dad4400b8..0000000000000
--- a/docs/changelog/114742.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 114742
-summary: Adding support for additional mapping to simulate ingest API
-area: Ingest Node
-type: enhancement
-issues: []
diff --git a/docs/changelog/114819.yaml b/docs/changelog/114819.yaml
deleted file mode 100644
index f8d03f7024801..0000000000000
--- a/docs/changelog/114819.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-pr: 114819
-summary: Don't use a `BytesStreamOutput` to copy keys in `BytesRefBlockHash`
-area: EQL
-type: bug
-issues:
- - 114599
diff --git a/docs/changelog/114855.yaml b/docs/changelog/114855.yaml
deleted file mode 100644
index daa6b985a14cf..0000000000000
--- a/docs/changelog/114855.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 114855
-summary: Add query rules retriever
-area: Relevance
-type: enhancement
-issues: [ ]
diff --git a/docs/changelog/114862.yaml b/docs/changelog/114862.yaml
deleted file mode 100644
index fb5f05fb8e2f9..0000000000000
--- a/docs/changelog/114862.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 114862
-summary: "[Inference API] Add API to get configuration of inference services"
-area: Machine Learning
-type: enhancement
-issues: []
diff --git a/docs/changelog/114869.yaml b/docs/changelog/114869.yaml
deleted file mode 100644
index 755418e7ce4d9..0000000000000
--- a/docs/changelog/114869.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 114869
-summary: Standardize error code when bulk body is invalid
-area: CRUD
-type: bug
-issues: []
diff --git a/docs/changelog/114899.yaml b/docs/changelog/114899.yaml
deleted file mode 100644
index 399aa5cf35409..0000000000000
--- a/docs/changelog/114899.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 114899
-summary: "ES|QL: Fix stats by constant expression"
-area: ES|QL
-type: bug
-issues: []
diff --git a/docs/changelog/114924.yaml b/docs/changelog/114924.yaml
deleted file mode 100644
index 536f446ef790d..0000000000000
--- a/docs/changelog/114924.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 114924
-summary: Reducing error-level stack trace logging for normal events in `GeoIpDownloader`
-area: Ingest Node
-type: bug
-issues: []
diff --git a/docs/changelog/114934.yaml b/docs/changelog/114934.yaml
deleted file mode 100644
index 68628993b1c80..0000000000000
--- a/docs/changelog/114934.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-pr: 114934
-summary: "[ES|QL] To_DatePeriod and To_TimeDuration return better error messages on\
-  \ `union_type` fields"
-area: ES|QL
-type: bug
-issues: []
diff --git a/docs/changelog/114964.yaml b/docs/changelog/114964.yaml
deleted file mode 100644
index 8274aeb76a937..0000000000000
--- a/docs/changelog/114964.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-pr: 114964
-summary: Add a `monitor_stats` privilege and allow that privilege for remote cluster
-  privileges
-area: Authorization
-type: enhancement
-issues: []
diff --git a/docs/changelog/115041.yaml b/docs/changelog/115041.yaml
deleted file mode 100644
index f4c047c1569ec..0000000000000
--- a/docs/changelog/115041.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-pr: 115041
-summary: Increase default `queue_capacity` to 10_000 and decrease max `queue_capacity`
-  to 100_000
-area: Machine Learning
-type: enhancement
-issues: []
diff --git a/docs/changelog/115091.yaml b/docs/changelog/115091.yaml
deleted file mode 100644
index 762bcca5e8c52..0000000000000
--- a/docs/changelog/115091.yaml
+++ /dev/null
@@ -1,7 +0,0 @@
-pr: 115091
-summary: Added stricter range type checks and runtime warnings for ENRICH
-area: ES|QL
-type: bug
-issues:
- - 107357
- - 116799
diff --git a/docs/changelog/115102.yaml b/docs/changelog/115102.yaml
deleted file mode 100644
index f679bb6c223a6..0000000000000
--- a/docs/changelog/115102.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-pr: 115102
-summary: Watch Next Run Interval Resets On Shard Move or Node Restart
-area: Watcher
-type: bug
-issues:
- - 111433
diff --git a/docs/changelog/115142.yaml b/docs/changelog/115142.yaml
deleted file mode 100644
index 2af968ae156da..0000000000000
--- a/docs/changelog/115142.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-pr: 115142
-summary: Attempt to clean up index before remote transfer
-area: Recovery
-type: enhancement
-issues:
- - 104473
diff --git a/docs/changelog/115266.yaml b/docs/changelog/115266.yaml
deleted file mode 100644
index 1d7fb1368c0e8..0000000000000
--- a/docs/changelog/115266.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-pr: 115266
-summary: ES|QL CCS uses `skip_unavailable` setting for handling disconnected remote
-  clusters
-area: ES|QL
-type: enhancement
-issues: [ 114531 ]
diff --git a/docs/changelog/115359.yaml b/docs/changelog/115359.yaml
deleted file mode 100644
index 65b3086dfc8d0..0000000000000
--- a/docs/changelog/115359.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-pr: 115359
-summary: Adding support for simulate ingest mapping adddition for indices with mappings
-  that do not come from templates
-area: Ingest Node
-type: enhancement
-issues: []
diff --git a/docs/changelog/115414.yaml b/docs/changelog/115414.yaml
deleted file mode 100644
index 7475b765bb30e..0000000000000
--- a/docs/changelog/115414.yaml
+++ /dev/null
@@ -1,9 +0,0 @@
-pr: 115414
-summary: Mitigate IOSession timeouts
-area: Machine Learning
-type: bug
-issues:
- - 114385
- - 114327
- - 114105
- - 114232
diff --git a/docs/changelog/115585.yaml b/docs/changelog/115585.yaml
deleted file mode 100644
index 02eecfc3d7d2b..0000000000000
--- a/docs/changelog/115585.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-pr: 115459
-summary: Adds access to flags no_sub_matches and no_overlapping_matches to hyphenation-decompounder-tokenfilter
-area: Search
-type: enhancement
-issues:
- - 97849
diff --git a/docs/changelog/115640.yaml b/docs/changelog/115640.yaml
deleted file mode 100644
index 5c4a943a9697d..0000000000000
--- a/docs/changelog/115640.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-pr: 115640
-summary: Fix NPE on plugin sync
-area: Infra/CLI
-type: bug
-issues:
- - 114818
diff --git a/docs/changelog/115655.yaml b/docs/changelog/115655.yaml
deleted file mode 100644
index 7184405867657..0000000000000
--- a/docs/changelog/115655.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 115655
-summary: Better sizing `BytesRef` for Strings in Queries
-area: Search
-type: enhancement
-issues: []
diff --git a/docs/changelog/115678.yaml b/docs/changelog/115678.yaml
deleted file mode 100644
index 31240eae1ebb4..0000000000000
--- a/docs/changelog/115678.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 115678
-summary: "ESQL: extract common filter from aggs"
-area: ES|QL
-type: enhancement
-issues: []
diff --git a/docs/changelog/115687.yaml b/docs/changelog/115687.yaml
deleted file mode 100644
index 1180b4627c635..0000000000000
--- a/docs/changelog/115687.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 115687
-summary: Add default ILM policies and switch to ILM for apm-data plugin
-area: Data streams
-type: feature
-issues: []
diff --git a/docs/changelog/115744.yaml b/docs/changelog/115744.yaml
deleted file mode 100644
index 9b8c91e59f451..0000000000000
--- a/docs/changelog/115744.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-pr: 115744
-summary: Use `SearchStats` instead of field.isAggregatable in data node planning
-area: ES|QL
-type: bug
-issues:
- - 115737
diff --git a/docs/changelog/115792.yaml b/docs/changelog/115792.yaml
deleted file mode 100644
index 2945a64e3043a..0000000000000
--- a/docs/changelog/115792.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 115792
-summary: Add ES|QL `bit_length` function
-area: ES|QL
-type: enhancement
-issues: []
diff --git a/docs/changelog/115797.yaml b/docs/changelog/115797.yaml
deleted file mode 100644
index 8adf51887c28a..0000000000000
--- a/docs/changelog/115797.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-pr: 115797
-summary: Enable `_tier` based coordinator rewrites for all indices (not just mounted
-  indices)
-area: Search
-type: enhancement
-issues: []
diff --git a/docs/changelog/115807.yaml b/docs/changelog/115807.yaml
deleted file mode 100644
index d17cabca4bd03..0000000000000
--- a/docs/changelog/115807.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 115807
-summary: "[Inference API] Improve chunked results error message"
-area: Machine Learning
-type: enhancement
-issues: []
diff --git a/docs/changelog/115812.yaml b/docs/changelog/115812.yaml
deleted file mode 100644
index c45c97041eb00..0000000000000
--- a/docs/changelog/115812.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 115812
-summary: "Prohibit changes to index mode, source, and sort settings during resize"
-area: Logs
-type: bug
-issues: []
diff --git a/docs/changelog/115814.yaml b/docs/changelog/115814.yaml
deleted file mode 100644
index 34f1213272d6f..0000000000000
--- a/docs/changelog/115814.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-pr: 115814
-summary: "[ES|QL] Implicit casting string literal to intervals"
-area: ES|QL
-type: enhancement
-issues:
- - 115352
diff --git a/docs/changelog/115858.yaml b/docs/changelog/115858.yaml
deleted file mode 100644
index 0c0408fa656f8..0000000000000
--- a/docs/changelog/115858.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 115858
-summary: "ESQL: optimise aggregations filtered by false/null into evals"
-area: ES|QL
-type: enhancement
-issues: []
diff --git a/docs/changelog/115994.yaml b/docs/changelog/115994.yaml
deleted file mode 100644
index ac090018c8a12..0000000000000
--- a/docs/changelog/115994.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 115994
-summary: Add logsdb telemetry
-area: Logs
-type: enhancement
-issues: []
diff --git a/docs/changelog/116021.yaml b/docs/changelog/116021.yaml
deleted file mode 100644
index 58c84b26805b2..0000000000000
--- a/docs/changelog/116021.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-pr: 116021
-summary: Fields caps does not honour ignore_unavailable
-area: Search
-type: bug
-issues:
- - 107767
diff --git a/docs/changelog/116082.yaml b/docs/changelog/116082.yaml
deleted file mode 100644
index 35ca5fb1ea82e..0000000000000
--- a/docs/changelog/116082.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 116082
-summary: Add support for bitwise inner-product in painless
-area: Vector Search
-type: enhancement
-issues: []
diff --git a/docs/changelog/116128.yaml b/docs/changelog/116128.yaml
deleted file mode 100644
index 7c38c0529c50d..0000000000000
--- a/docs/changelog/116128.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 116128
-summary: Add num docs and size to logsdb telemetry
-area: Logs
-type: enhancement
-issues: []
diff --git a/docs/changelog/116211.yaml b/docs/changelog/116211.yaml
deleted file mode 100644
index 6f55b1b2fef34..0000000000000
--- a/docs/changelog/116211.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 116211
-summary: Use underlying `ByteBuf` `refCount` for `ReleasableBytesReference`
-area: Network
-type: bug
-issues: []
diff --git a/docs/changelog/116325.yaml b/docs/changelog/116325.yaml
deleted file mode 100644
index b8cd16dc85773..0000000000000
--- a/docs/changelog/116325.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 116325
-summary: Adjust analyze limit exception to be a `bad_request`
-area: Analysis
-type: bug
-issues: []
diff --git a/docs/changelog/116346.yaml b/docs/changelog/116346.yaml
deleted file mode 100644
index 1dcace88a98c0..0000000000000
--- a/docs/changelog/116346.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 116346
-summary: "[ESQL] Fix Binary Comparisons on Date Nanos"
-area: ES|QL
-type: bug
-issues: []
diff --git a/docs/changelog/116348.yaml b/docs/changelog/116348.yaml
deleted file mode 100644
index 927ffc5a6121d..0000000000000
--- a/docs/changelog/116348.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 116348
-summary: "ESQL: Honor skip_unavailable setting for nonmatching indices errors at planning time"
-area: ES|QL
-type: enhancement
-issues: [ 114531 ]
diff --git a/docs/changelog/116423.yaml b/docs/changelog/116423.yaml
new file mode 100644
index 0000000000000..d6d10eab410e4
--- /dev/null
+++ b/docs/changelog/116423.yaml
@@ -0,0 +1,5 @@
+pr: 116423
+summary: Support mTLS for the Elastic Inference Service integration inside the inference API
+area: Machine Learning
+type: feature
+issues: []
diff --git a/docs/changelog/116431.yaml b/docs/changelog/116431.yaml
deleted file mode 100644
index 50c6baf1d01c7..0000000000000
--- a/docs/changelog/116431.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 116431
-summary: Adds support for `input_type` field to Vertex inference service
-area: Machine Learning
-type: enhancement
-issues: []
diff --git a/docs/changelog/116437.yaml b/docs/changelog/116437.yaml
deleted file mode 100644
index 94c2464db9980..0000000000000
--- a/docs/changelog/116437.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 116437
-summary: Ensure class resource stream is closed in `ResourceUtils`
-area: Indices APIs
-type: enhancement
-issues: []
diff --git a/docs/changelog/116447.yaml b/docs/changelog/116447.yaml
deleted file mode 100644
index 8c0cea4b54578..0000000000000
--- a/docs/changelog/116447.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 116447
-summary: Adding a deprecation info API warning for data streams with old indices
-area: Data streams
-type: enhancement
-issues: []
diff --git a/docs/changelog/116515.yaml b/docs/changelog/116515.yaml
deleted file mode 100644
index 6c0d473361e52..0000000000000
--- a/docs/changelog/116515.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 116515
-summary: Esql/lookup join grammar
-area: ES|QL
-type: feature
-issues: []
diff --git a/docs/changelog/116583.yaml b/docs/changelog/116583.yaml
deleted file mode 100644
index 3dc8337fe5b86..0000000000000
--- a/docs/changelog/116583.yaml
+++ /dev/null
@@ -1,7 +0,0 @@
-pr: 116583
-summary: Fix NPE in `EnrichLookupService` on mixed clusters with <8.14 versions
-area: ES|QL
-type: bug
-issues:
- - 116529
- - 116544
diff --git a/docs/changelog/116591.yaml b/docs/changelog/116591.yaml
deleted file mode 100644
index 60ef241e197b3..0000000000000
--- a/docs/changelog/116591.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 116591
-summary: "Add support for `BYTE_LENGTH` scalar function"
-area: ES|QL
-type: feature
-issues: []
diff --git a/docs/changelog/116656.yaml b/docs/changelog/116656.yaml
deleted file mode 100644
index eb5d5a1cfc201..0000000000000
--- a/docs/changelog/116656.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-pr: 116656
-summary: _validate does not honour ignore_unavailable
-area: Search
-type: bug
-issues:
- - 116594
diff --git a/docs/changelog/116664.yaml b/docs/changelog/116664.yaml
deleted file mode 100644
index 36915fca39731..0000000000000
--- a/docs/changelog/116664.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-pr: 116664
-summary: Hides `hugging_face_elser` service from the `GET _inference/_services API`
-area: Machine Learning
-type: bug
-issues:
- - 116644
diff --git a/docs/changelog/116689.yaml b/docs/changelog/116689.yaml
deleted file mode 100644
index 0b1d1646868aa..0000000000000
--- a/docs/changelog/116689.yaml
+++ /dev/null
@@ -1,10 +0,0 @@
-pr: 116689
-summary: Deprecate `_source.mode` in mappings
-area: Mapping
-type: deprecation
-issues: []
-deprecation:
-  title: Deprecate `_source.mode` in mappings
-  area: Mapping
-  details: Configuring `_source.mode` in mappings is deprecated and will be removed in future versions. Use `index.mapping.source.mode` index setting instead.
-  impact: Use `index.mapping.source.mode` index setting instead
diff --git a/docs/changelog/116809.yaml b/docs/changelog/116809.yaml
deleted file mode 100644
index 61dbeb233d576..0000000000000
--- a/docs/changelog/116809.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 116809
-summary: "Distinguish `LicensedFeature` by family field"
-area: License
-type: bug
-issues: []
diff --git a/docs/changelog/116819.yaml b/docs/changelog/116819.yaml
deleted file mode 100644
index afe06c583fe55..0000000000000
--- a/docs/changelog/116819.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 116819
-summary: ESQL - Add match operator (:)
-area: Search
-type: feature
-issues: []
diff --git a/docs/changelog/116931.yaml b/docs/changelog/116931.yaml
deleted file mode 100644
index 8b31d236ff137..0000000000000
--- a/docs/changelog/116931.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 116931
-summary: Enable built-in Inference Endpoints and default for Semantic Text
-area: "Machine Learning"
-type: enhancement
-issues: []
diff --git a/docs/changelog/116953.yaml b/docs/changelog/116953.yaml
deleted file mode 100644
index 33616510d8fd0..0000000000000
--- a/docs/changelog/116953.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-pr: 116953
-summary: Fix false positive date detection with trailing dot
-area: Mapping
-type: bug
-issues:
- - 116946
diff --git a/docs/changelog/116957.yaml b/docs/changelog/116957.yaml
deleted file mode 100644
index 1020190de180d..0000000000000
--- a/docs/changelog/116957.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 116957
-summary: Propagate scoring function through random sampler
-area: Machine Learning
-type: bug
-issues: [ 110134 ]
diff --git a/docs/changelog/116962.yaml b/docs/changelog/116962.yaml
deleted file mode 100644
index 8f16b00e3f9fc..0000000000000
--- a/docs/changelog/116962.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 116962
-summary: "Add special case for elastic reranker in inference API"
-area: Machine Learning
-type: enhancement
-issues: []
diff --git a/docs/changelog/116980.yaml b/docs/changelog/116980.yaml
deleted file mode 100644
index 140324fd40b92..0000000000000
--- a/docs/changelog/116980.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-pr: 116980
-summary: "ESQL: Fix sorts containing `_source`"
-area: ES|QL
-type: bug
-issues:
- - 116659
diff --git a/docs/changelog/117080.yaml b/docs/changelog/117080.yaml
deleted file mode 100644
index 5909f966e0fa2..0000000000000
--- a/docs/changelog/117080.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 117080
-summary: Esql Enable Date Nanos (tech preview)
-area: ES|QL
-type: enhancement
-issues: []
diff --git a/docs/changelog/117105.yaml b/docs/changelog/117105.yaml
deleted file mode 100644
index de56c4d521a62..0000000000000
--- a/docs/changelog/117105.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-pr: 117105
-summary: Fix long metric deserialize & add - auto-resize needs to be set manually
-area: CCS
-type: bug
-issues:
- - 116914
diff --git a/docs/changelog/117189.yaml b/docs/changelog/117189.yaml
deleted file mode 100644
index e89c2d81506d9..0000000000000
--- a/docs/changelog/117189.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 117189
-summary: Fix deberta tokenizer bug caused by bug in normalizer
-area: Machine Learning
-type: bug
-issues: []
diff --git a/docs/changelog/117213.yaml b/docs/changelog/117213.yaml
deleted file mode 100644
index 3b4cd0cee966c..0000000000000
--- a/docs/changelog/117213.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-pr: 117213
-summary: Fix reconstituting version string from components
-area: Ingest Node
-type: bug
-issues:
- - 116950
diff --git a/docs/changelog/117271.yaml b/docs/changelog/117271.yaml
deleted file mode 100644
index 1a328279b9635..0000000000000
--- a/docs/changelog/117271.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 117271
-summary: Don't skip shards in coord rewrite if timestamp is an alias
-area: Search
-type: bug
-issues: []
diff --git a/docs/changelog/117294.yaml b/docs/changelog/117294.yaml
deleted file mode 100644
index f6e80690de7ff..0000000000000
--- a/docs/changelog/117294.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 117294
-summary: Always Emit Inference ID in Semantic Text Mapping
-area: Mapping
-type: bug
-issues: []
diff --git a/docs/changelog/117297.yaml b/docs/changelog/117297.yaml
deleted file mode 100644
index 4a0051bbae644..0000000000000
--- a/docs/changelog/117297.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 117297
-summary: Fix CCS exchange when multi cluster aliases point to same cluster
-area: ES|QL
-type: bug
-issues: []
diff --git a/docs/changelog/117312.yaml b/docs/changelog/117312.yaml
deleted file mode 100644
index 302b91388ef2b..0000000000000
--- a/docs/changelog/117312.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 117312
-summary: Add missing `async_search` query parameters to rest-api-spec
-area: Search
-type: bug
-issues: []
diff --git a/docs/changelog/117316.yaml b/docs/changelog/117316.yaml
deleted file mode 100644
index 69474d68a8190..0000000000000
--- a/docs/changelog/117316.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 117316
-summary: Fix validation of SORT by aggregate functions
-area: ES|QL
-type: bug
-issues: []
diff --git a/docs/changelog/117350.yaml b/docs/changelog/117350.yaml
deleted file mode 100644
index dca54f2037a87..0000000000000
--- a/docs/changelog/117350.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 117350
-summary: "Improve halfbyte transposition performance, marginally improving bbq performance"
-area: Vector Search
-type: enhancement
-issues: []
diff --git a/docs/changelog/117404.yaml b/docs/changelog/117404.yaml
deleted file mode 100644
index 0bab171956ca9..0000000000000
--- a/docs/changelog/117404.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 117404
-summary: Correct bit * byte and bit * float script comparisons
-area: Vector Search
-type: bug
-issues: []
diff --git a/docs/changelog/117451.yaml b/docs/changelog/117451.yaml
new file mode 100644
index 0000000000000..bda0ca59e0953
--- /dev/null
+++ b/docs/changelog/117451.yaml
@@ -0,0 +1,6 @@
+pr: 117451
+summary: ST_EXTENT aggregation
+area: ES|QL
+type: feature
+issues:
+ - 104659
diff --git a/docs/changelog/117503.yaml b/docs/changelog/117503.yaml
deleted file mode 100644
index d48741262b581..0000000000000
--- a/docs/changelog/117503.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-pr: 117503
-summary: Fix COUNT filter pushdown
-area: ES|QL
-type: bug
-issues:
- - 115522
diff --git a/docs/changelog/117551.yaml b/docs/changelog/117551.yaml
deleted file mode 100644
index 081dd9203d82a..0000000000000
--- a/docs/changelog/117551.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 117551
-summary: Fix stats by constant expresson with alias
-area: ES|QL
-type: bug
-issues: []
diff --git a/docs/changelog/117575.yaml b/docs/changelog/117575.yaml
deleted file mode 100644
index 781444ae97be5..0000000000000
--- a/docs/changelog/117575.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 117575
-summary: Fix enrich cache size setting name
-area: Ingest Node
-type: bug
-issues: []
diff --git a/docs/changelog/117595.yaml b/docs/changelog/117595.yaml
deleted file mode 100644
index 9360c372ac97e..0000000000000
--- a/docs/changelog/117595.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 117595
-summary: Fix for Deberta tokenizer when
input sequence exceeds 512 tokens -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/117657.yaml b/docs/changelog/117657.yaml deleted file mode 100644 index 0a72e9dabe9e8..0000000000000 --- a/docs/changelog/117657.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 117657 -summary: Ignore cancellation exceptions -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/117762.yaml b/docs/changelog/117762.yaml deleted file mode 100644 index 123432e0f0507..0000000000000 --- a/docs/changelog/117762.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 117762 -summary: "Parse the contents of dynamic objects for [subobjects:false]" -area: Mapping -type: bug -issues: - - 117544 diff --git a/docs/changelog/117792.yaml b/docs/changelog/117792.yaml deleted file mode 100644 index 2d7ddda1ace40..0000000000000 --- a/docs/changelog/117792.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 117792 -summary: Address mapping and compute engine runtime field issues -area: Mapping -type: bug -issues: - - 117644 diff --git a/docs/changelog/117839.yaml b/docs/changelog/117839.yaml new file mode 100644 index 0000000000000..98c97b5078c02 --- /dev/null +++ b/docs/changelog/117839.yaml @@ -0,0 +1,5 @@ +pr: 117839 +summary: Add match support for `semantic_text` fields +area: "Search" +type: enhancement +issues: [] diff --git a/docs/changelog/117842.yaml b/docs/changelog/117842.yaml deleted file mode 100644 index 9b528a158288c..0000000000000 --- a/docs/changelog/117842.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 117842 -summary: Limit size of `Literal#toString` -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/117865.yaml b/docs/changelog/117865.yaml deleted file mode 100644 index 33dc497725f92..0000000000000 --- a/docs/changelog/117865.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 117865 -summary: Fix BWC for ES|QL cluster request -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/117914.yaml b/docs/changelog/117914.yaml deleted file mode 100644 index da58ed7bb04b7..0000000000000 --- a/docs/changelog/117914.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 117914 -summary: Fix for propagating filters from compound to inner retrievers -area: Ranking -type: bug -issues: [] diff --git a/docs/changelog/117920.yaml b/docs/changelog/117920.yaml deleted file mode 100644 index 1bfddabd4462d..0000000000000 --- a/docs/changelog/117920.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 117920 -summary: Wait for the worker service to shutdown before closing task processor -area: Machine Learning -type: bug -issues: - - 117563 diff --git a/docs/changelog/117953.yaml b/docs/changelog/117953.yaml deleted file mode 100644 index 62f0218b1cdc7..0000000000000 --- a/docs/changelog/117953.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 117953 -summary: Acquire stats searcher for data stream stats -area: Data streams -type: bug -issues: [] diff --git a/docs/changelog/118035.yaml b/docs/changelog/118035.yaml new file mode 100644 index 0000000000000..fdeaa184723b9 --- /dev/null +++ b/docs/changelog/118035.yaml @@ -0,0 +1,6 @@ +pr: 118035 +summary: Include hidden indices in `DeprecationInfoAction` +area: Indices APIs +type: bug +issues: + - 118020 diff --git a/docs/changelog/118102.yaml b/docs/changelog/118102.yaml new file mode 100644 index 0000000000000..e5ec32cdddbec --- /dev/null +++ b/docs/changelog/118102.yaml @@ -0,0 +1,5 @@ +pr: 118102 +summary: "ESQL: Enterprise license enforcement for CCS" +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/118173.yaml b/docs/changelog/118173.yaml new file mode 100644 index 
0000000000000..a3c9054674ba5 --- /dev/null +++ b/docs/changelog/118173.yaml @@ -0,0 +1,5 @@ +pr: 118173 +summary: ES|QL categorize with multiple groupings +area: Machine Learning +type: feature +issues: [] diff --git a/docs/changelog/118194.yaml b/docs/changelog/118194.yaml new file mode 100644 index 0000000000000..0e5eca55d597c --- /dev/null +++ b/docs/changelog/118194.yaml @@ -0,0 +1,5 @@ +pr: 118194 +summary: Retry on `ClusterBlockException` on transform destination index +area: Machine Learning +type: enhancement +issues: [] diff --git a/docs/changelog/118291.yaml b/docs/changelog/118291.yaml new file mode 100644 index 0000000000000..8001b3972e876 --- /dev/null +++ b/docs/changelog/118291.yaml @@ -0,0 +1,5 @@ +pr: 118291 +summary: Adding a migration reindex cancel API +area: Data streams +type: enhancement +issues: [] diff --git a/docs/changelog/118354.yaml b/docs/changelog/118354.yaml deleted file mode 100644 index e2d72db121276..0000000000000 --- a/docs/changelog/118354.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 118354 -summary: Fix log message format bugs -area: Ingest Node -type: bug -issues: [] diff --git a/docs/changelog/118370.yaml b/docs/changelog/118370.yaml deleted file mode 100644 index e6a429448e493..0000000000000 --- a/docs/changelog/118370.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 118370 -summary: Fix concurrency issue with `ReinitializingSourceProvider` -area: Mapping -type: bug -issues: - - 118238 diff --git a/docs/changelog/118378.yaml b/docs/changelog/118378.yaml deleted file mode 100644 index d6c388b671968..0000000000000 --- a/docs/changelog/118378.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 118378 -summary: Opt into extra data stream resolution -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/118474.yaml b/docs/changelog/118474.yaml new file mode 100644 index 0000000000000..1b0c6942eb323 --- /dev/null +++ b/docs/changelog/118474.yaml @@ -0,0 +1,6 @@ +pr: 118474 +summary: Esql bucket function for date nanos +area: ES|QL +type: enhancement +issues: + - 118031 diff --git a/docs/changelog/118634.yaml b/docs/changelog/118634.yaml new file mode 100644 index 0000000000000..d798d94b72075 --- /dev/null +++ b/docs/changelog/118634.yaml @@ -0,0 +1,5 @@ +pr: 118634 +summary: "Add undeclared Azure settings, modify test to exercise them" +area: Snapshot/Restore +type: bug +issues: [] diff --git a/docs/reference/docs/update.asciidoc b/docs/reference/docs/update.asciidoc index ca6a7e489449b..a212c4e152b0e 100644 --- a/docs/reference/docs/update.asciidoc +++ b/docs/reference/docs/update.asciidoc @@ -71,7 +71,7 @@ include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=refresh] include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=routing] `_source`:: -(Optional, list) Set to `false` to disable source retrieval (default: `true`). +(Optional, list) Set to `true` to enable source retrieval (default: `false`). You can also specify a comma-separated list of the fields you want to retrieve. 
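+
+For example, the following request (a minimal sketch assuming an existing document with ID `1` in a hypothetical index `my-index-000001`) applies a partial update and returns the updated source under the `get` key of the response:
+
+[source,console]
+----
+POST my-index-000001/_update/1?_source=true
+{
+  "doc": {
+    "status": "active"
+  }
+}
+----
+// TEST[skip:illustrative sketch only]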
`_source_excludes`:: diff --git a/docs/reference/esql/functions/aggregation-functions.asciidoc b/docs/reference/esql/functions/aggregation-functions.asciidoc index c2c2508ad5de2..24b42a6efd831 100644 --- a/docs/reference/esql/functions/aggregation-functions.asciidoc +++ b/docs/reference/esql/functions/aggregation-functions.asciidoc @@ -17,6 +17,7 @@ The <> command supports these aggregate functions: * <> * <> * experimental:[] <> +* experimental:[] <> * <> * <> * <> @@ -33,6 +34,7 @@ include::layout/median_absolute_deviation.asciidoc[] include::layout/min.asciidoc[] include::layout/percentile.asciidoc[] include::layout/st_centroid_agg.asciidoc[] +include::layout/st_extent_agg.asciidoc[] include::layout/std_dev.asciidoc[] include::layout/sum.asciidoc[] include::layout/top.asciidoc[] diff --git a/docs/reference/esql/functions/description/categorize.asciidoc b/docs/reference/esql/functions/description/categorize.asciidoc index 32af0051e91c8..c956066ad53f3 100644 --- a/docs/reference/esql/functions/description/categorize.asciidoc +++ b/docs/reference/esql/functions/description/categorize.asciidoc @@ -8,4 +8,4 @@ Groups text messages into categories of similarly formatted text values. * can't be used within other expressions * can't be used with multiple groupings -* can't be used or referenced within aggregations +* can't be used or referenced within aggregate functions diff --git a/docs/reference/esql/functions/description/st_extent_agg.asciidoc b/docs/reference/esql/functions/description/st_extent_agg.asciidoc new file mode 100644 index 0000000000000..a9e1acfb0e6fb --- /dev/null +++ b/docs/reference/esql/functions/description/st_extent_agg.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Calculate the spatial extent over a field with geometry type. Returns a bounding box for all values of the field. diff --git a/docs/reference/esql/functions/examples/st_extent_agg.asciidoc b/docs/reference/esql/functions/examples/st_extent_agg.asciidoc new file mode 100644 index 0000000000000..179be82103641 --- /dev/null +++ b/docs/reference/esql/functions/examples/st_extent_agg.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/spatial.csv-spec[tag=st_extent_agg-airports] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/spatial.csv-spec[tag=st_extent_agg-airports-result] +|=== + diff --git a/docs/reference/esql/functions/kibana/definition/bucket.json b/docs/reference/esql/functions/kibana/definition/bucket.json index 660e1be49fda9..18802f5ff8fef 100644 --- a/docs/reference/esql/functions/kibana/definition/bucket.json +++ b/docs/reference/esql/functions/kibana/definition/bucket.json @@ -310,6 +310,312 @@ "variadic" : false, "returnType" : "date" }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Numeric or date expression from which to derive buckets." + }, + { + "name" : "buckets", + "type" : "date_period", + "optional" : false, + "description" : "Target number of buckets, or desired bucket size if `from` and `to` parameters are omitted." 
+ } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Numeric or date expression from which to derive buckets." + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "Target number of buckets, or desired bucket size if `from` and `to` parameters are omitted." + }, + { + "name" : "from", + "type" : "date", + "optional" : true, + "description" : "Start of the range. Can be a number, a date or a date expressed as a string." + }, + { + "name" : "to", + "type" : "date", + "optional" : true, + "description" : "End of the range. Can be a number, a date or a date expressed as a string." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Numeric or date expression from which to derive buckets." + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "Target number of buckets, or desired bucket size if `from` and `to` parameters are omitted." + }, + { + "name" : "from", + "type" : "date", + "optional" : true, + "description" : "Start of the range. Can be a number, a date or a date expressed as a string." + }, + { + "name" : "to", + "type" : "keyword", + "optional" : true, + "description" : "End of the range. Can be a number, a date or a date expressed as a string." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Numeric or date expression from which to derive buckets." + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "Target number of buckets, or desired bucket size if `from` and `to` parameters are omitted." + }, + { + "name" : "from", + "type" : "date", + "optional" : true, + "description" : "Start of the range. Can be a number, a date or a date expressed as a string." + }, + { + "name" : "to", + "type" : "text", + "optional" : true, + "description" : "End of the range. Can be a number, a date or a date expressed as a string." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Numeric or date expression from which to derive buckets." + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "Target number of buckets, or desired bucket size if `from` and `to` parameters are omitted." + }, + { + "name" : "from", + "type" : "keyword", + "optional" : true, + "description" : "Start of the range. Can be a number, a date or a date expressed as a string." + }, + { + "name" : "to", + "type" : "date", + "optional" : true, + "description" : "End of the range. Can be a number, a date or a date expressed as a string." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Numeric or date expression from which to derive buckets." + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "Target number of buckets, or desired bucket size if `from` and `to` parameters are omitted." + }, + { + "name" : "from", + "type" : "keyword", + "optional" : true, + "description" : "Start of the range. 
Can be a number, a date or a date expressed as a string." + }, + { + "name" : "to", + "type" : "keyword", + "optional" : true, + "description" : "End of the range. Can be a number, a date or a date expressed as a string." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Numeric or date expression from which to derive buckets." + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "Target number of buckets, or desired bucket size if `from` and `to` parameters are omitted." + }, + { + "name" : "from", + "type" : "keyword", + "optional" : true, + "description" : "Start of the range. Can be a number, a date or a date expressed as a string." + }, + { + "name" : "to", + "type" : "text", + "optional" : true, + "description" : "End of the range. Can be a number, a date or a date expressed as a string." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Numeric or date expression from which to derive buckets." + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "Target number of buckets, or desired bucket size if `from` and `to` parameters are omitted." + }, + { + "name" : "from", + "type" : "text", + "optional" : true, + "description" : "Start of the range. Can be a number, a date or a date expressed as a string." + }, + { + "name" : "to", + "type" : "date", + "optional" : true, + "description" : "End of the range. Can be a number, a date or a date expressed as a string." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Numeric or date expression from which to derive buckets." + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "Target number of buckets, or desired bucket size if `from` and `to` parameters are omitted." + }, + { + "name" : "from", + "type" : "text", + "optional" : true, + "description" : "Start of the range. Can be a number, a date or a date expressed as a string." + }, + { + "name" : "to", + "type" : "keyword", + "optional" : true, + "description" : "End of the range. Can be a number, a date or a date expressed as a string." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Numeric or date expression from which to derive buckets." + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "Target number of buckets, or desired bucket size if `from` and `to` parameters are omitted." + }, + { + "name" : "from", + "type" : "text", + "optional" : true, + "description" : "Start of the range. Can be a number, a date or a date expressed as a string." + }, + { + "name" : "to", + "type" : "text", + "optional" : true, + "description" : "End of the range. Can be a number, a date or a date expressed as a string." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Numeric or date expression from which to derive buckets." 
+ }, + { + "name" : "buckets", + "type" : "time_duration", + "optional" : false, + "description" : "Target number of buckets, or desired bucket size if `from` and `to` parameters are omitted." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/st_extent_agg.json b/docs/reference/esql/functions/kibana/definition/st_extent_agg.json new file mode 100644 index 0000000000000..19afcc59e38a4 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/st_extent_agg.json @@ -0,0 +1,61 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "agg", + "name" : "st_extent_agg", + "description" : "Calculate the spatial extent over a field with geometry type. Returns a bounding box for all values of the field.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_shape" + } + ], + "examples" : [ + "FROM airports\n| WHERE country == \"India\"\n| STATS extent = ST_EXTENT_AGG(location)" + ], + "preview" : false, + "snapshot_only" : false +} diff --git a/docs/reference/esql/functions/kibana/definition/term.json b/docs/reference/esql/functions/kibana/definition/term.json index d8bb61fd596a1..b0f129afd239c 100644 --- a/docs/reference/esql/functions/kibana/definition/term.json +++ b/docs/reference/esql/functions/kibana/definition/term.json @@ -78,7 +78,7 @@ } ], "examples" : [ - "from books \n| where term(author, \"gabriel\") \n| keep book_no, title\n| limit 3;" + "FROM books \n| WHERE TERM(author, \"gabriel\") \n| KEEP book_no, title\n| LIMIT 3;" ], "preview" : true, "snapshot_only" : true diff --git a/docs/reference/esql/functions/kibana/docs/st_extent_agg.md b/docs/reference/esql/functions/kibana/docs/st_extent_agg.md new file mode 100644 index 0000000000000..a2e307c5b2c55 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/st_extent_agg.md @@ -0,0 +1,12 @@ + + +### ST_EXTENT_AGG +Calculate the spatial extent over a field with geometry type. Returns a bounding box for all values of the field. + +``` +FROM airports +| WHERE country == "India" +| STATS extent = ST_EXTENT_AGG(location) +``` diff --git a/docs/reference/esql/functions/kibana/docs/term.md b/docs/reference/esql/functions/kibana/docs/term.md index 83e61a949208d..ffecd26d737f7 100644 --- a/docs/reference/esql/functions/kibana/docs/term.md +++ b/docs/reference/esql/functions/kibana/docs/term.md @@ -6,8 +6,8 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ Performs a Term query on the specified field. Returns true if the provided term matches the row. 
``` -from books -| where term(author, "gabriel") -| keep book_no, title -| limit 3; +FROM books +| WHERE TERM(author, "gabriel") +| KEEP book_no, title +| LIMIT 3; ``` diff --git a/docs/reference/esql/functions/layout/st_extent_agg.asciidoc b/docs/reference/esql/functions/layout/st_extent_agg.asciidoc new file mode 100644 index 0000000000000..946bef661e70c --- /dev/null +++ b/docs/reference/esql/functions/layout/st_extent_agg.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-st_extent_agg]] +=== `ST_EXTENT_AGG` + +*Syntax* + +[.text-center] +image::esql/functions/signature/st_extent_agg.svg[Embedded,opts=inline] + +include::../parameters/st_extent_agg.asciidoc[] +include::../description/st_extent_agg.asciidoc[] +include::../types/st_extent_agg.asciidoc[] +include::../examples/st_extent_agg.asciidoc[] diff --git a/docs/reference/esql/functions/parameters/st_extent_agg.asciidoc b/docs/reference/esql/functions/parameters/st_extent_agg.asciidoc new file mode 100644 index 0000000000000..8903aa1a472a3 --- /dev/null +++ b/docs/reference/esql/functions/parameters/st_extent_agg.asciidoc @@ -0,0 +1,6 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Parameters* + +`field`:: + diff --git a/docs/reference/esql/functions/signature/st_extent_agg.svg b/docs/reference/esql/functions/signature/st_extent_agg.svg new file mode 100644 index 0000000000000..bb19b68bfb08b --- /dev/null +++ b/docs/reference/esql/functions/signature/st_extent_agg.svg @@ -0,0 +1 @@ +ST_EXTENT_AGG(field) \ No newline at end of file diff --git a/docs/reference/esql/functions/types/bucket.asciidoc b/docs/reference/esql/functions/types/bucket.asciidoc index 172e84b6f7860..2e6985e6bc4ed 100644 --- a/docs/reference/esql/functions/types/bucket.asciidoc +++ b/docs/reference/esql/functions/types/bucket.asciidoc @@ -16,6 +16,17 @@ date | integer | text | date | date date | integer | text | keyword | date date | integer | text | text | date date | time_duration | | | date +date_nanos | date_period | | | date_nanos +date_nanos | integer | date | date | date_nanos +date_nanos | integer | date | keyword | date_nanos +date_nanos | integer | date | text | date_nanos +date_nanos | integer | keyword | date | date_nanos +date_nanos | integer | keyword | keyword | date_nanos +date_nanos | integer | keyword | text | date_nanos +date_nanos | integer | text | date | date_nanos +date_nanos | integer | text | keyword | date_nanos +date_nanos | integer | text | text | date_nanos +date_nanos | time_duration | | | date_nanos double | double | | | double double | integer | double | double | double double | integer | double | integer | double diff --git a/docs/reference/esql/functions/types/st_extent_agg.asciidoc b/docs/reference/esql/functions/types/st_extent_agg.asciidoc new file mode 100644 index 0000000000000..c836aa1896f07 --- /dev/null +++ b/docs/reference/esql/functions/types/st_extent_agg.asciidoc @@ -0,0 +1,12 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +field | result +cartesian_point | cartesian_shape +cartesian_shape | cartesian_shape +geo_point | geo_shape +geo_shape | geo_shape +|=== diff --git a/docs/reference/index-modules.asciidoc b/docs/reference/index-modules.asciidoc index d9b8f8802a04b..73e2db6e45e34 100644 --- a/docs/reference/index-modules.asciidoc +++ b/docs/reference/index-modules.asciidoc @@ -113,7 +113,7 @@ Index mode supports the following values: `standard`::: Standard indexing with default settings. -`tsds`::: _(data streams only)_ Index mode optimized for storage of metrics. For more information, see <>. +`time_series`::: _(data streams only)_ Index mode optimized for storage of metrics. For more information, see <>. `logsdb`::: _(data streams only)_ Index mode optimized for <>. diff --git a/docs/reference/inference/service-elasticsearch.asciidoc b/docs/reference/inference/service-elasticsearch.asciidoc index cd06e6d7b2f64..bf7e2976bbe63 100644 --- a/docs/reference/inference/service-elasticsearch.asciidoc +++ b/docs/reference/inference/service-elasticsearch.asciidoc @@ -153,7 +153,12 @@ For further details, refer to the {ml-docs}/ml-nlp-elser.html[ELSER model documentation]. [[inference-example-elastic-reranker]] ==== Elastic Rerank via the `elasticsearch` service -The following example shows how to create an {infer} endpoint called `my-elastic-rerank` to perform a `rerank` task type using the built-in Elastic Rerank cross-encoder model. +The following example shows how to create an {infer} endpoint called `my-elastic-rerank` to perform a `rerank` task type using the built-in {ml-docs}/ml-nlp-rerank.html[Elastic Rerank] cross-encoder model. + +[TIP] +==== +Refer to this https://github.com/elastic/elasticsearch-labs/blob/main/notebooks/search/12-semantic-reranking-elastic-rerank.ipynb[Python notebook] for an end-to-end example using Elastic Rerank. +==== The API request below will automatically download the Elastic Rerank model if it isn't already downloaded and then deploy the model. Once deployed, the model can be used for semantic re-ranking with a <>. diff --git a/docs/reference/mapping/params/index-prefixes.asciidoc b/docs/reference/mapping/params/index-prefixes.asciidoc index a143c5531c81b..1d5e844467b6f 100644 --- a/docs/reference/mapping/params/index-prefixes.asciidoc +++ b/docs/reference/mapping/params/index-prefixes.asciidoc @@ -54,3 +54,30 @@ PUT my-index-000001 } } -------------------------------- + +The `index_prefixes` parameter instructs {ES} to create a `._index_prefix` subfield. This +field is used to run fast prefix queries.
When highlighting, add the `._index_prefix` +subfield to the `matched_fields` parameter to highlight the main field based on the +matches found in the prefix field, as in the request below: + +[source,console] +-------------------------------- +GET my-index-000001/_search +{ + "query": { + "prefix": { + "full_name": { + "value": "ki" + } + } + }, + "highlight": { + "fields": { + "full_name": { + "matched_fields": ["full_name._index_prefix"] + } + } + } +} +-------------------------------- +// TEST[continued] diff --git a/docs/reference/mapping/types/search-as-you-type.asciidoc b/docs/reference/mapping/types/search-as-you-type.asciidoc index 3c71389f4cebb..c2673a614c265 100644 --- a/docs/reference/mapping/types/search-as-you-type.asciidoc +++ b/docs/reference/mapping/types/search-as-you-type.asciidoc @@ -97,11 +97,21 @@ GET my-index-000001/_search "my_field._3gram" ] } + }, + "highlight": { + "fields": { + "my_field": { + "matched_fields": ["my_field._index_prefix"] <1> + } + } } } -------------------------------------------------- // TEST[continued] +<1> Adding `my_field._index_prefix` to `matched_fields` allows `my_field` to be + highlighted based on matches from the `my_field._index_prefix` field as well. + [source,console-result] -------------------------------------------------- { @@ -126,6 +136,11 @@ GET my-index-000001/_search "_score" : 0.8630463, "_source" : { "my_field" : "quick brown fox jump lazy dog" + }, + "highlight": { + "my_field": [ + "quick brown fox jump lazy dog" + ] } } ] diff --git a/docs/reference/search/retriever.asciidoc b/docs/reference/search/retriever.asciidoc index cb04d4fb6fbf1..f20e9148bf5e7 100644 --- a/docs/reference/search/retriever.asciidoc +++ b/docs/reference/search/retriever.asciidoc @@ -442,7 +442,12 @@ If the child retriever already specifies any filters, then this top-level filter [[text-similarity-reranker-retriever-example-elastic-rerank]] ==== Example: Elastic Rerank -This examples demonstrates how to deploy the Elastic Rerank model and use it to re-rank search results using the `text_similarity_reranker` retriever. +[TIP] +==== +Refer to this https://github.com/elastic/elasticsearch-labs/blob/main/notebooks/search/12-semantic-reranking-elastic-rerank.ipynb[Python notebook] for an end-to-end example using Elastic Rerank. +==== + +This example demonstrates how to deploy the {ml-docs}/ml-nlp-rerank.html[Elastic Rerank] model and use it to re-rank search results using the `text_similarity_reranker` retriever. Follow these steps: diff --git a/docs/reference/search/search-your-data/retrievers-examples.asciidoc b/docs/reference/search/search-your-data/retrievers-examples.asciidoc index ad1cc32dcee01..5cada8960aeab 100644 --- a/docs/reference/search/search-your-data/retrievers-examples.asciidoc +++ b/docs/reference/search/search-your-data/retrievers-examples.asciidoc @@ -1,9 +1,8 @@ [[retrievers-examples]] +=== Retrievers examples Learn how to combine different retrievers in these hands-on examples.
-=== Retrievers examples - [discrete] [[retrievers-examples-setup]] ==== Add example data diff --git a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java index ad0f14bcf4478..a6b8a31fc3894 100644 --- a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java +++ b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java @@ -13,7 +13,11 @@ import java.net.URLStreamHandlerFactory; public interface EntitlementChecker { - void check$java_lang_System$exit(Class<?> callerClass, int status); + + // Exit the JVM process + void check$$exit(Class<?> callerClass, Runtime runtime, int status); + + void check$$halt(Class<?> callerClass, Runtime runtime, int status); // URLClassLoader ctor void check$java_net_URLClassLoader$(Class<?> callerClass, URL[] urls); diff --git a/libs/entitlement/qa/common/src/main/java/org/elasticsearch/entitlement/qa/common/RestEntitlementsCheckAction.java b/libs/entitlement/qa/common/src/main/java/org/elasticsearch/entitlement/qa/common/RestEntitlementsCheckAction.java index e63fa4f3b726b..1ac4a7506eacb 100644 --- a/libs/entitlement/qa/common/src/main/java/org/elasticsearch/entitlement/qa/common/RestEntitlementsCheckAction.java +++ b/libs/entitlement/qa/common/src/main/java/org/elasticsearch/entitlement/qa/common/RestEntitlementsCheckAction.java @@ -47,14 +47,21 @@ static CheckAction serverAndPlugin(Runnable action) { } private static final Map<String, CheckAction> checkActions = Map.ofEntries( - entry("system_exit", CheckAction.serverOnly(RestEntitlementsCheckAction::systemExit)), + entry("runtime_exit", CheckAction.serverOnly(RestEntitlementsCheckAction::runtimeExit)), + entry("runtime_halt", CheckAction.serverOnly(RestEntitlementsCheckAction::runtimeHalt)), entry("create_classloader", CheckAction.serverAndPlugin(RestEntitlementsCheckAction::createClassLoader)) ); - @SuppressForbidden(reason = "Specifically testing System.exit") - private static void systemExit() { - logger.info("Calling System.exit(123);"); - System.exit(123); + @SuppressForbidden(reason = "Specifically testing Runtime.exit") + private static void runtimeExit() { + logger.info("Calling Runtime.exit;"); + Runtime.getRuntime().exit(123); + } + + @SuppressForbidden(reason = "Specifically testing Runtime.halt") + private static void runtimeHalt() { + logger.info("Calling Runtime.halt;"); + Runtime.getRuntime().halt(123); } private static void createClassLoader() { diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java index aa63b630ed7cd..a5ca0543ad15a 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java @@ -28,7 +28,12 @@ public ElasticsearchEntitlementChecker(PolicyManager policyManager) { } @Override - public void check$java_lang_System$exit(Class<?> callerClass, int status) { + public void check$$exit(Class<?> callerClass, Runtime runtime, int status) { + policyManager.checkExitVM(callerClass); + } + + @Override + public void check$$halt(Class<?> callerClass, Runtime runtime, int status) { policyManager.checkExitVM(callerClass); } diff --git
a/libs/entitlement/src/main23/java/org/elasticsearch/entitlement/runtime/api/Java23ElasticsearchEntitlementChecker.java b/libs/entitlement/src/main23/java/org/elasticsearch/entitlement/runtime/api/Java23ElasticsearchEntitlementChecker.java index d0f9f4f48609c..912d76ecfc01a 100644 --- a/libs/entitlement/src/main23/java/org/elasticsearch/entitlement/runtime/api/Java23ElasticsearchEntitlementChecker.java +++ b/libs/entitlement/src/main23/java/org/elasticsearch/entitlement/runtime/api/Java23ElasticsearchEntitlementChecker.java @@ -19,8 +19,8 @@ public Java23ElasticsearchEntitlementChecker(PolicyManager policyManager) { } @Override - public void check$java_lang_System$exit(Class<?> callerClass, int status) { + public void check$$exit(Class<?> callerClass, Runtime runtime, int status) { // TODO: this is just an example, we shouldn't really override a method implemented in the superclass - super.check$java_lang_System$exit(callerClass, status); + super.check$$exit(callerClass, runtime, status); } } diff --git a/libs/entitlement/tools/common/src/main/java/org/elasticsearch/entitlement/tools/ExternalAccess.java b/libs/entitlement/tools/common/src/main/java/org/elasticsearch/entitlement/tools/ExternalAccess.java new file mode 100644 index 0000000000000..cd049a91fa4da --- /dev/null +++ b/libs/entitlement/tools/common/src/main/java/org/elasticsearch/entitlement/tools/ExternalAccess.java @@ -0,0 +1,68 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1".
+ */ + +package org.elasticsearch.entitlement.tools; + +import java.util.Arrays; +import java.util.EnumSet; +import java.util.stream.Collectors; + +public enum ExternalAccess { + PUBLIC_CLASS, + PUBLIC_METHOD, + PROTECTED_METHOD; + + private static final String DELIMITER = ":"; + + public static String toString(EnumSet<ExternalAccess> externalAccesses) { + return externalAccesses.stream().map(Enum::toString).collect(Collectors.joining(DELIMITER)); + } + + public static EnumSet<ExternalAccess> fromPermissions( + boolean packageExported, + boolean publicClass, + boolean publicMethod, + boolean protectedMethod + ) { + if (publicMethod && protectedMethod) { + throw new IllegalArgumentException(); + } + + EnumSet<ExternalAccess> externalAccesses = EnumSet.noneOf(ExternalAccess.class); + if (publicMethod) { + externalAccesses.add(ExternalAccess.PUBLIC_METHOD); + } else if (protectedMethod) { + externalAccesses.add(ExternalAccess.PROTECTED_METHOD); + } + + if (packageExported && publicClass) { + externalAccesses.add(ExternalAccess.PUBLIC_CLASS); + } + return externalAccesses; + } + + public static boolean isExternallyAccessible(EnumSet<ExternalAccess> access) { + return access.contains(ExternalAccess.PUBLIC_CLASS) + && (access.contains(ExternalAccess.PUBLIC_METHOD) || access.contains(ExternalAccess.PROTECTED_METHOD)); + } + + public static EnumSet<ExternalAccess> fromString(String accessAsString) { + if ("PUBLIC".equals(accessAsString)) { + return EnumSet.of(ExternalAccess.PUBLIC_CLASS, ExternalAccess.PUBLIC_METHOD); + } + if ("PUBLIC-METHOD".equals(accessAsString)) { + return EnumSet.of(ExternalAccess.PUBLIC_METHOD); + } + if ("PRIVATE".equals(accessAsString)) { + return EnumSet.noneOf(ExternalAccess.class); + } + + return EnumSet.copyOf(Arrays.stream(accessAsString.split(DELIMITER)).map(ExternalAccess::valueOf).toList()); + } +} diff --git a/libs/entitlement/tools/common/src/main/java/org/elasticsearch/entitlement/tools/Utils.java b/libs/entitlement/tools/common/src/main/java/org/elasticsearch/entitlement/tools/Utils.java index c72e550a529cd..c6a71f55db4c6 100644 --- a/libs/entitlement/tools/common/src/main/java/org/elasticsearch/entitlement/tools/Utils.java +++ b/libs/entitlement/tools/common/src/main/java/org/elasticsearch/entitlement/tools/Utils.java @@ -11,16 +11,28 @@ import java.io.IOException; import java.lang.module.ModuleDescriptor; +import java.net.URI; import java.nio.file.FileSystem; +import java.nio.file.FileSystems; import java.nio.file.Files; +import java.nio.file.Path; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; public class Utils { - public static Map<String, Set<String>> findModuleExports(FileSystem fs) throws IOException { + private static final Set<String> EXCLUDED_MODULES = Set.of( + "java.desktop", + "jdk.jartool", + "jdk.jdi", + "java.security.jgss", + "jdk.jshell" + ); + + private static Map<String, Set<String>> findModuleExports(FileSystem fs) throws IOException { var modulesExports = new HashMap<String, Set<String>>(); try (var stream = Files.walk(fs.getPath("modules"))) { stream.filter(p -> p.getFileName().toString().equals("module-info.class")).forEach(x -> { @@ -42,4 +54,27 @@ public static Map<String, Set<String>> findModuleExports(FileSystem fs) throws I return modulesExports; } + public interface JdkModuleConsumer { + void accept(String moduleName, List<Path> moduleClasses, Set<String> moduleExports); + } + + public static void walkJdkModules(JdkModuleConsumer c) throws IOException { + + FileSystem fs = FileSystems.getFileSystem(URI.create("jrt:/")); + + var moduleExports = Utils.findModuleExports(fs); + + try (var stream =
Files.walk(fs.getPath("modules"))) { + var modules = stream.filter(x -> x.toString().endsWith(".class")) + .collect(Collectors.groupingBy(x -> x.subpath(1, 2).toString())); + + for (var kv : modules.entrySet()) { + var moduleName = kv.getKey(); + if (Utils.EXCLUDED_MODULES.contains(moduleName) == false) { + var thisModuleExports = moduleExports.get(moduleName); + c.accept(moduleName, kv.getValue(), thisModuleExports); + } + } + } + } } diff --git a/libs/entitlement/tools/public-callers-finder/README.md b/libs/entitlement/tools/public-callers-finder/README.md new file mode 100644 index 0000000000000..794576b3409a8 --- /dev/null +++ b/libs/entitlement/tools/public-callers-finder/README.md @@ -0,0 +1,50 @@ +This tool scans the JDK on which it is running. It takes a list of methods (compatible with the output of the `securitymanager-scanner` tool), and looks for the "public surface" of these methods (i.e. any class/method accessible from regular Java code that calls into the original list, directly or transitively). + +It acts basically as a recursive "Find Usages" in IntelliJ, stopping at the first fully accessible point (public method on a public class). +The tool scans every method in every class inside the same Java module; e.g. +if you have a private method `File#normalizedList`, it will scan `java.base` to find +public methods like `File#list(String)`, `File#list(FilenameFilter, String)` and +`File#listFiles(File)`. + +The tool considers implemented interfaces (directly); e.g. if we're looking at a +method `C.m`, where `C implements I`, it will look for calls to `I.m`. It will +also consider (indirectly) calls to `S.m` (where `S` is a supertype of `C`), as +it treats calls to `super` in `S.m` as regular calls (e.g. `example() -> S.m() -> C.m()`). + + +In order to run the tool, use: +```shell +./gradlew :libs:entitlement:tools:public-callers-finder:run <input-file> [<bubble-up-from-public>] +``` +Where `input-file` is a CSV file (columns separated by `TAB`) that contains the following columns: +1. Module name +2. unused +3. unused +4. unused +5. Fully qualified class name (ASM style, with `/` separators) +6. Method name +7. Method descriptor (ASM signature) +8. Visibility (PUBLIC/PUBLIC-METHOD/PRIVATE) + +And `bubble-up-from-public` is a boolean (`true|false`) indicating if the code should stop at the first public method (`false`: default, recommended) or continue to find usages recursively even after reaching the "public surface". + +The output of the tool is another CSV file, with one line for each entry-point, columns separated by `TAB`: + +1. Module name +2. File name (from source root) +3. Line number +4. Fully qualified class name (ASM style, with `/` separators) +5. Method name +6. Method descriptor (ASM signature) +7. Visibility (PUBLIC/PUBLIC-METHOD/PRIVATE) +8. Original caller Module name +9. Original caller Class name (ASM style, with `/` separators) +10. Original caller Method name +11.
Original caller Visibility + +Examples: +``` +java.base DeleteOnExitHook.java 50 java/io/DeleteOnExitHook$1 run ()V PUBLIC java.base java/io/File delete PUBLIC +java.base ZipFile.java 254 java/util/zip/ZipFile <init> (Ljava/io/File;ILjava/nio/charset/Charset;)V PUBLIC java.base java/io/File delete PUBLIC +java.logging FileHandler.java 279 java/util/logging/FileHandler <init> ()V PUBLIC java.base java/io/File delete PUBLIC +``` diff --git a/libs/entitlement/tools/public-callers-finder/build.gradle b/libs/entitlement/tools/public-callers-finder/build.gradle new file mode 100644 index 0000000000000..083b1a43b9794 --- /dev/null +++ b/libs/entitlement/tools/public-callers-finder/build.gradle @@ -0,0 +1,61 @@ +plugins { + id 'application' +} + +apply plugin: 'elasticsearch.build' +apply plugin: 'elasticsearch.publish' + +tasks.named("dependencyLicenses").configure { + mapping from: /asm-.*/, to: 'asm' +} + +group = 'org.elasticsearch.entitlement.tools' + +ext { + javaMainClass = "org.elasticsearch.entitlement.tools.publiccallersfinder.Main" +} + +application { + mainClass.set(javaMainClass) + applicationDefaultJvmArgs = [ + '--add-exports', 'java.base/sun.security.util=ALL-UNNAMED', + '--add-opens', 'java.base/java.lang=ALL-UNNAMED', + '--add-opens', 'java.base/java.net=ALL-UNNAMED', + '--add-opens', 'java.base/java.net.spi=ALL-UNNAMED', + '--add-opens', 'java.base/java.util.concurrent=ALL-UNNAMED', + '--add-opens', 'java.base/javax.crypto=ALL-UNNAMED', + '--add-opens', 'java.base/javax.security.auth=ALL-UNNAMED', + '--add-opens', 'java.base/jdk.internal.logger=ALL-UNNAMED', + '--add-opens', 'java.base/sun.nio.ch=ALL-UNNAMED', + '--add-opens', 'jdk.management.jfr/jdk.management.jfr=ALL-UNNAMED', + '--add-opens', 'java.logging/java.util.logging=ALL-UNNAMED', + '--add-opens', 'java.logging/sun.util.logging.internal=ALL-UNNAMED', + '--add-opens', 'java.naming/javax.naming.ldap.spi=ALL-UNNAMED', + '--add-opens', 'java.rmi/sun.rmi.runtime=ALL-UNNAMED', + '--add-opens', 'jdk.dynalink/jdk.dynalink=ALL-UNNAMED', + '--add-opens', 'jdk.dynalink/jdk.dynalink.linker=ALL-UNNAMED', + '--add-opens', 'java.desktop/sun.awt=ALL-UNNAMED', + '--add-opens', 'java.sql.rowset/javax.sql.rowset.spi=ALL-UNNAMED', + '--add-opens', 'java.sql/java.sql=ALL-UNNAMED', + '--add-opens', 'java.xml.crypto/com.sun.org.apache.xml.internal.security.utils=ALL-UNNAMED' + ] +} + +repositories { + mavenCentral() +} + +dependencies { + compileOnly(project(':libs:core')) + implementation 'org.ow2.asm:asm:9.7.1' + implementation 'org.ow2.asm:asm-util:9.7.1' + implementation(project(':libs:entitlement:tools:common')) +} + +tasks.named('forbiddenApisMain').configure { + replaceSignatureFiles 'jdk-signatures' +} + +tasks.named("thirdPartyAudit").configure { + ignoreMissingClasses() +} diff --git a/libs/entitlement/tools/public-callers-finder/licenses/asm-LICENSE.txt b/libs/entitlement/tools/public-callers-finder/licenses/asm-LICENSE.txt new file mode 100644 index 0000000000000..afb064f2f2666 --- /dev/null +++ b/libs/entitlement/tools/public-callers-finder/licenses/asm-LICENSE.txt @@ -0,0 +1,26 @@ +Copyright (c) 2012 France Télécom +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +2.
Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. +3. Neither the name of the copyright holders nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF +THE POSSIBILITY OF SUCH DAMAGE. diff --git a/libs/entitlement/tools/public-callers-finder/licenses/asm-NOTICE.txt b/libs/entitlement/tools/public-callers-finder/licenses/asm-NOTICE.txt new file mode 100644 index 0000000000000..8d1c8b69c3fce --- /dev/null +++ b/libs/entitlement/tools/public-callers-finder/licenses/asm-NOTICE.txt @@ -0,0 +1 @@ + diff --git a/libs/entitlement/tools/public-callers-finder/src/main/java/org/elasticsearch/entitlement/tools/publiccallersfinder/FindUsagesClassVisitor.java b/libs/entitlement/tools/public-callers-finder/src/main/java/org/elasticsearch/entitlement/tools/publiccallersfinder/FindUsagesClassVisitor.java new file mode 100644 index 0000000000000..6f136d0977e3f --- /dev/null +++ b/libs/entitlement/tools/public-callers-finder/src/main/java/org/elasticsearch/entitlement/tools/publiccallersfinder/FindUsagesClassVisitor.java @@ -0,0 +1,141 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.entitlement.tools.publiccallersfinder; + +import org.elasticsearch.entitlement.tools.ExternalAccess; +import org.objectweb.asm.ClassVisitor; +import org.objectweb.asm.Label; +import org.objectweb.asm.MethodVisitor; +import org.objectweb.asm.Type; + +import java.lang.constant.ClassDesc; +import java.lang.reflect.AccessFlag; +import java.util.EnumSet; +import java.util.Set; + +import static org.objectweb.asm.Opcodes.ACC_PROTECTED; +import static org.objectweb.asm.Opcodes.ACC_PUBLIC; +import static org.objectweb.asm.Opcodes.ASM9; + +class FindUsagesClassVisitor extends ClassVisitor { + + private int classAccess; + private boolean accessibleViaInterfaces; + + record MethodDescriptor(String className, String methodName, String methodDescriptor) {} + + record EntryPoint( + String moduleName, + String source, + int line, + String className, + String methodName, + String methodDescriptor, + EnumSet<ExternalAccess> access + ) {} + + interface CallerConsumer { + void accept(String source, int line, String className, String methodName, String methodDescriptor, EnumSet<ExternalAccess> access); + } + + private final Set<String> moduleExports; + private final MethodDescriptor methodToFind; + private final CallerConsumer callers; + private String className; + private String source; + + protected FindUsagesClassVisitor(Set<String> moduleExports, MethodDescriptor methodToFind, CallerConsumer callers) { + super(ASM9); + this.moduleExports = moduleExports; + this.methodToFind = methodToFind; + this.callers = callers; + } + + @Override + public void visit(int version, int access, String name, String signature, String superName, String[] interfaces) { + super.visit(version, access, name, signature, superName, interfaces); + this.className = name; + this.classAccess = access; + if (interfaces.length > 0) { + this.accessibleViaInterfaces = findAccessibility(interfaces, moduleExports); + } + } + + private static boolean findAccessibility(String[] interfaces, Set<String> moduleExports) { + var accessibleViaInterfaces = false; + for (var interfaceName : interfaces) { + if (moduleExports.contains(getPackageName(interfaceName))) { + var interfaceType = Type.getObjectType(interfaceName); + try { + var clazz = Class.forName(interfaceType.getClassName()); + if (clazz.accessFlags().contains(AccessFlag.PUBLIC)) { + accessibleViaInterfaces = true; + } + } catch (ClassNotFoundException ignored) {} + } + } + return accessibleViaInterfaces; + } + + @Override + public void visitSource(String source, String debug) { + super.visitSource(source, debug); + this.source = source; + } + + @Override + public MethodVisitor visitMethod(int access, String name, String descriptor, String signature, String[] exceptions) { + return new FindUsagesMethodVisitor(super.visitMethod(access, name, descriptor, signature, exceptions), name, descriptor, access); + } + + private static String getPackageName(String className) { + return ClassDesc.ofInternalName(className).packageName(); + } + + private class FindUsagesMethodVisitor extends MethodVisitor { + + private final String methodName; + private int line; + private final String methodDescriptor; + private final int methodAccess; + + protected FindUsagesMethodVisitor(MethodVisitor mv, String methodName, String methodDescriptor, int methodAccess) { + super(ASM9, mv); + this.methodName = methodName; + this.methodDescriptor = methodDescriptor; + this.methodAccess = methodAccess; + } + + @Override + public void visitMethodInsn(int opcode, String owner, String name, String descriptor, boolean isInterface) {
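+            // Inspect each call instruction in the enclosing method: when it targets the method we are
+            // searching for (a null descriptor on the target acts as a wildcard across overloads), report
+            // the enclosing method as a caller, along with how accessible it is from outside its module.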
+            super.visitMethodInsn(opcode, owner, name, descriptor, isInterface);
+
+            if (methodToFind.className.equals(owner)) {
+                if (methodToFind.methodName.equals(name)) {
+                    if (methodToFind.methodDescriptor == null || methodToFind.methodDescriptor.equals(descriptor)) {
+                        EnumSet<ExternalAccess> externalAccess = ExternalAccess.fromPermissions(
+                            moduleExports.contains(getPackageName(className)),
+                            accessibleViaInterfaces || (classAccess & ACC_PUBLIC) != 0,
+                            (methodAccess & ACC_PUBLIC) != 0,
+                            (methodAccess & ACC_PROTECTED) != 0
+                        );
+                        callers.accept(source, line, className, methodName, methodDescriptor, externalAccess);
+                    }
+                }
+            }
+        }
+
+        @Override
+        public void visitLineNumber(int line, Label start) {
+            super.visitLineNumber(line, start);
+            this.line = line;
+        }
+    }
+}
diff --git a/libs/entitlement/tools/public-callers-finder/src/main/java/org/elasticsearch/entitlement/tools/publiccallersfinder/Main.java b/libs/entitlement/tools/public-callers-finder/src/main/java/org/elasticsearch/entitlement/tools/publiccallersfinder/Main.java
new file mode 100644
index 0000000000000..60b3a3c9f3c8e
--- /dev/null
+++ b/libs/entitlement/tools/public-callers-finder/src/main/java/org/elasticsearch/entitlement/tools/publiccallersfinder/Main.java
@@ -0,0 +1,197 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.entitlement.tools.publiccallersfinder;
+
+import org.elasticsearch.core.SuppressForbidden;
+import org.elasticsearch.entitlement.tools.ExternalAccess;
+import org.elasticsearch.entitlement.tools.Utils;
+import org.objectweb.asm.ClassReader;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.ArrayDeque;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.EnumSet;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+public class Main {
+
+    private static final String SEPARATOR = "\t";
+
+    record CallChain(FindUsagesClassVisitor.EntryPoint entryPoint, CallChain next) {}
+
+    interface UsageConsumer {
+        void usageFound(CallChain originalEntryPoint, CallChain newMethod);
+    }
+
+    private static void findTransitiveUsages(
+        Collection<CallChain> firstLevelCallers,
+        List<Path> classesToScan,
+        Set<String> moduleExports,
+        boolean bubbleUpFromPublic,
+        UsageConsumer usageConsumer
+    ) {
+        for (var caller : firstLevelCallers) {
+            var methodsToCheck = new ArrayDeque<>(Set.of(caller));
+            var methodsSeen = new HashSet<FindUsagesClassVisitor.EntryPoint>();
+
+            while (methodsToCheck.isEmpty() == false) {
+                var methodToCheck = methodsToCheck.removeFirst();
+                var m = methodToCheck.entryPoint();
+                var visitor2 = new FindUsagesClassVisitor(
+                    moduleExports,
+                    new FindUsagesClassVisitor.MethodDescriptor(m.className(), m.methodName(), m.methodDescriptor()),
+                    (source, line, className, methodName, methodDescriptor, access) -> {
+                        var newMethod = new CallChain(
+                            new FindUsagesClassVisitor.EntryPoint(
+                                m.moduleName(),
+                                source,
+                                line,
+                                className,
+                                methodName,
+                                methodDescriptor,
+                                access
+                            ),
+                            methodToCheck
+                        );
+
+                        var notSeenBefore = methodsSeen.add(newMethod.entryPoint());
+                        if (notSeenBefore) {
+                            if (ExternalAccess.isExternallyAccessible(access)) {
+                                usageConsumer.usageFound(caller.next(), newMethod);
+                            }
+                            if (access.contains(ExternalAccess.PUBLIC_METHOD) == false || bubbleUpFromPublic) {
+                                methodsToCheck.add(newMethod);
+                            }
+                        }
+                    }
+                );
+
+                for (var classFile : classesToScan) {
+                    try {
+                        ClassReader cr = new ClassReader(Files.newInputStream(classFile));
+                        cr.accept(visitor2, 0);
+                    } catch (IOException e) {
+                        throw new RuntimeException(e);
+                    }
+                }
+            }
+        }
+    }
+
+    private static void identifyTopLevelEntryPoints(
+        FindUsagesClassVisitor.MethodDescriptor methodToFind,
+        String methodToFindModule,
+        EnumSet<ExternalAccess> methodToFindAccess,
+        boolean bubbleUpFromPublic
+    ) throws IOException {
+
+        Utils.walkJdkModules((moduleName, moduleClasses, moduleExports) -> {
+            var originalCallers = new ArrayList<CallChain>();
+            var visitor = new FindUsagesClassVisitor(
+                moduleExports,
+                methodToFind,
+                (source, line, className, methodName, methodDescriptor, access) -> originalCallers.add(
+                    new CallChain(
+                        new FindUsagesClassVisitor.EntryPoint(moduleName, source, line, className, methodName, methodDescriptor, access),
+                        new CallChain(
+                            new FindUsagesClassVisitor.EntryPoint(
+                                methodToFindModule,
+                                "",
+                                0,
+                                methodToFind.className(),
+                                methodToFind.methodName(),
+                                methodToFind.methodDescriptor(),
+                                methodToFindAccess
+                            ),
+                            null
+                        )
+                    )
+                )
+            );
+
+            for (var classFile : moduleClasses) {
+                try {
+                    ClassReader cr = new ClassReader(Files.newInputStream(classFile));
+                    cr.accept(visitor, 0);
+                } catch (IOException e) {
+                    throw new RuntimeException(e);
+                }
+            }
+
+            originalCallers.stream().filter(c -> ExternalAccess.isExternallyAccessible(c.entryPoint().access())).forEach(c -> {
+                var originalCaller = c.next();
+                printRow(getEntryPointString(c.entryPoint().moduleName(), c.entryPoint()), getOriginalEntryPointString(originalCaller));
+            });
+            var firstLevelCallers = bubbleUpFromPublic ?
originalCallers : originalCallers.stream().filter(Main::isNotFullyPublic).toList();
+
+            if (firstLevelCallers.isEmpty() == false) {
+                findTransitiveUsages(
+                    firstLevelCallers,
+                    moduleClasses,
+                    moduleExports,
+                    bubbleUpFromPublic,
+                    (originalEntryPoint, newMethod) -> printRow(
+                        getEntryPointString(moduleName, newMethod.entryPoint()),
+                        getOriginalEntryPointString(originalEntryPoint)
+                    )
+                );
+            }
+        });
+    }
+
+    private static boolean isNotFullyPublic(CallChain c) {
+        return (c.entryPoint().access().contains(ExternalAccess.PUBLIC_CLASS)
+            && c.entryPoint().access().contains(ExternalAccess.PUBLIC_METHOD)) == false;
+    }
+
+    @SuppressForbidden(reason = "This tool prints the CSV to stdout")
+    private static void printRow(String entryPointString, String originalEntryPoint) {
+        System.out.println(entryPointString + SEPARATOR + originalEntryPoint);
+    }
+
+    private static String getEntryPointString(String moduleName, FindUsagesClassVisitor.EntryPoint e) {
+        return moduleName + SEPARATOR + e.source() + SEPARATOR + e.line() + SEPARATOR + e.className() + SEPARATOR + e.methodName()
+            + SEPARATOR + e.methodDescriptor() + SEPARATOR + ExternalAccess.toString(e.access());
+    }
+
+    private static String getOriginalEntryPointString(CallChain originalCallChain) {
+        return originalCallChain.entryPoint().moduleName() + SEPARATOR + originalCallChain.entryPoint().className() + SEPARATOR
+            + originalCallChain.entryPoint().methodName() + SEPARATOR + ExternalAccess.toString(originalCallChain.entryPoint().access());
+    }
+
+    interface MethodDescriptorConsumer {
+        void accept(FindUsagesClassVisitor.MethodDescriptor methodDescriptor, String moduleName, EnumSet<ExternalAccess> access)
+            throws IOException;
+    }
+
+    private static void parseCsv(Path csvPath, MethodDescriptorConsumer methodConsumer) throws IOException {
+        var lines = Files.readAllLines(csvPath);
+        for (var l : lines) {
+            var tokens = l.split(SEPARATOR);
+            var moduleName = tokens[0];
+            var className = tokens[3];
+            var methodName = tokens[4];
+            var methodDescriptor = tokens[5];
+            var access = ExternalAccess.fromString(tokens[6]);
+            methodConsumer.accept(new FindUsagesClassVisitor.MethodDescriptor(className, methodName, methodDescriptor), moduleName, access);
+        }
+    }
+
+    public static void main(String[] args) throws IOException {
+        var csvFilePath = Path.of(args[0]);
+        boolean bubbleUpFromPublic = args.length >= 2 && Boolean.parseBoolean(args[1]);
+        parseCsv(csvFilePath, (method, module, access) -> identifyTopLevelEntryPoints(method, module, access, bubbleUpFromPublic));
+    }
+}
diff --git a/libs/entitlement/tools/securitymanager-scanner/src/README.md b/libs/entitlement/tools/securitymanager-scanner/README.md
similarity index 100%
rename from libs/entitlement/tools/securitymanager-scanner/src/README.md
rename to libs/entitlement/tools/securitymanager-scanner/README.md
diff --git a/libs/entitlement/tools/securitymanager-scanner/src/main/java/org/elasticsearch/entitlement/tools/securitymanager/scanner/Main.java b/libs/entitlement/tools/securitymanager-scanner/src/main/java/org/elasticsearch/entitlement/tools/securitymanager/scanner/Main.java
index bea49e0296e67..7c2dd69d60f0c 100644
--- a/libs/entitlement/tools/securitymanager-scanner/src/main/java/org/elasticsearch/entitlement/tools/securitymanager/scanner/Main.java
+++ b/libs/entitlement/tools/securitymanager-scanner/src/main/java/org/elasticsearch/entitlement/tools/securitymanager/scanner/Main.java
@@ -10,47 +10,35 @@ package org.elasticsearch.entitlement.tools.securitymanager.scanner;
 import
org.elasticsearch.core.SuppressForbidden;
+import org.elasticsearch.entitlement.tools.ExternalAccess;
 import org.elasticsearch.entitlement.tools.Utils;
 import org.objectweb.asm.ClassReader;
 
 import java.io.IOException;
-import java.net.URI;
-import java.nio.file.FileSystem;
-import java.nio.file.FileSystems;
 import java.nio.file.Files;
 import java.util.HashMap;
 import java.util.List;
-import java.util.Set;
 
 public class Main {
 
-    static final Set<String> excludedModules = Set.of("java.desktop");
-
     private static void identifySMChecksEntryPoints() throws IOException {
-        FileSystem fs = FileSystems.getFileSystem(URI.create("jrt:/"));
-
-        var moduleExports = Utils.findModuleExports(fs);
-
         var callers = new HashMap<String, List<SecurityCheckClassVisitor.CallerInfo>>();
         var visitor = new SecurityCheckClassVisitor(callers);
 
-        try (var stream = Files.walk(fs.getPath("modules"))) {
-            stream.filter(x -> x.toString().endsWith(".class")).forEach(x -> {
-                var moduleName = x.subpath(1, 2).toString();
-                if (excludedModules.contains(moduleName) == false) {
-                    try {
-                        ClassReader cr = new ClassReader(Files.newInputStream(x));
-                        visitor.setCurrentModule(moduleName, moduleExports.get(moduleName));
-                        var path = x.getNameCount() > 3 ? x.subpath(2, x.getNameCount() - 1).toString() : "";
-                        visitor.setCurrentSourcePath(path);
-                        cr.accept(visitor, 0);
-                    } catch (IOException e) {
-                        throw new RuntimeException(e);
-                    }
+        Utils.walkJdkModules((moduleName, moduleClasses, moduleExports) -> {
+            for (var classFile : moduleClasses) {
+                try {
+                    ClassReader cr = new ClassReader(Files.newInputStream(classFile));
+                    visitor.setCurrentModule(moduleName, moduleExports);
+                    var path = classFile.getNameCount() > 3 ? classFile.subpath(2, classFile.getNameCount() - 1).toString() : "";
+                    visitor.setCurrentSourcePath(path);
+                    cr.accept(visitor, 0);
+                } catch (IOException e) {
+                    throw new RuntimeException(e);
                 }
-            });
-        }
+            }
+        });
 
         printToStdout(callers);
     }
 
@@ -68,16 +56,8 @@ private static void printToStdout(HashMap excludedClasses = Set.of(SECURITY_MANAGER_INTERNAL_NAME);
 
-    enum ExternalAccess {
-        CLASS,
-        METHOD
-    }
-
     record CallerInfo(
         String moduleName,
         String source,
@@ -208,15 +205,12 @@ public void visitMethodInsn(int opcode, String owner, String name, String descri
             || opcode == INVOKEDYNAMIC) {
             if (SECURITY_MANAGER_INTERNAL_NAME.equals(owner)) {
-                EnumSet<ExternalAccess> externalAccesses = EnumSet.noneOf(ExternalAccess.class);
-                if (moduleExports.contains(getPackageName(className))) {
-                    if ((methodAccess & ACC_PUBLIC) != 0) {
-                        externalAccesses.add(ExternalAccess.METHOD);
-                    }
-                    if ((classAccess & ACC_PUBLIC) != 0) {
-                        externalAccesses.add(ExternalAccess.CLASS);
-                    }
-                }
+                EnumSet<ExternalAccess> externalAccesses = ExternalAccess.fromPermissions(
+                    moduleExports.contains(getPackageName(className)),
+                    (classAccess & ACC_PUBLIC) != 0,
+                    (methodAccess & ACC_PUBLIC) != 0,
+                    (methodAccess & ACC_PROTECTED) != 0
+                );
 
                 if (name.equals("checkPermission")) {
                     var callers = callerInfoByMethod.computeIfAbsent(name, ignored -> new ArrayList<>());
diff --git a/libs/geo/src/main/java/org/elasticsearch/geometry/utils/SpatialEnvelopeVisitor.java b/libs/geo/src/main/java/org/elasticsearch/geometry/utils/SpatialEnvelopeVisitor.java
index eee4a62c7d588..696be2808ed1f 100644
--- a/libs/geo/src/main/java/org/elasticsearch/geometry/utils/SpatialEnvelopeVisitor.java
+++ b/libs/geo/src/main/java/org/elasticsearch/geometry/utils/SpatialEnvelopeVisitor.java
@@ -83,10 +83,15 @@ public static Optional<Rectangle> visitCartesian(Geometry geometry) {
         return Optional.empty();
     }
 
+    public enum WrapLongitude {
+        NO_WRAP,
+        WRAP
+    }
+
     /**
      * Determine the BBOX assuming the
CRS is geographic (eg WGS84) and optionally wrapping the longitude around the dateline.
      */
-    public static Optional<Rectangle> visitGeo(Geometry geometry, boolean wrapLongitude) {
+    public static Optional<Rectangle> visitGeo(Geometry geometry, WrapLongitude wrapLongitude) {
         var visitor = new SpatialEnvelopeVisitor(new GeoPointVisitor(wrapLongitude));
         if (geometry.visit(visitor)) {
             return Optional.of(visitor.getResult());
@@ -181,40 +186,16 @@ public Rectangle getResult() {
      *
      */
     public static class GeoPointVisitor implements PointVisitor {
-        private double minY = Double.POSITIVE_INFINITY;
-        private double maxY = Double.NEGATIVE_INFINITY;
-        private double minNegX = Double.POSITIVE_INFINITY;
-        private double maxNegX = Double.NEGATIVE_INFINITY;
-        private double minPosX = Double.POSITIVE_INFINITY;
-        private double maxPosX = Double.NEGATIVE_INFINITY;
-
-        public double getMinY() {
-            return minY;
-        }
-
-        public double getMaxY() {
-            return maxY;
-        }
-
-        public double getMinNegX() {
-            return minNegX;
-        }
+        protected double minY = Double.POSITIVE_INFINITY;
+        protected double maxY = Double.NEGATIVE_INFINITY;
+        protected double minNegX = Double.POSITIVE_INFINITY;
+        protected double maxNegX = Double.NEGATIVE_INFINITY;
+        protected double minPosX = Double.POSITIVE_INFINITY;
+        protected double maxPosX = Double.NEGATIVE_INFINITY;
 
-        public double getMaxNegX() {
-            return maxNegX;
-        }
-
-        public double getMinPosX() {
-            return minPosX;
-        }
+        private final WrapLongitude wrapLongitude;
 
-        public double getMaxPosX() {
-            return maxPosX;
-        }
-
-        private final boolean wrapLongitude;
-
-        public GeoPointVisitor(boolean wrapLongitude) {
+        public GeoPointVisitor(WrapLongitude wrapLongitude) {
             this.wrapLongitude = wrapLongitude;
         }
 
@@ -253,32 +234,35 @@ public Rectangle getResult() {
         return getResult(minNegX, minPosX, maxNegX, maxPosX, maxY, minY, wrapLongitude);
     }
 
-    private static Rectangle getResult(
+    protected static Rectangle getResult(
         double minNegX,
         double minPosX,
         double maxNegX,
         double maxPosX,
         double maxY,
         double minY,
-        boolean wrapLongitude
+        WrapLongitude wrapLongitude
     ) {
         assert Double.isFinite(maxY);
         if (Double.isInfinite(minPosX)) {
             return new Rectangle(minNegX, maxNegX, maxY, minY);
         } else if (Double.isInfinite(minNegX)) {
             return new Rectangle(minPosX, maxPosX, maxY, minY);
-        } else if (wrapLongitude) {
-            double unwrappedWidth = maxPosX - minNegX;
-            double wrappedWidth = (180 - minPosX) - (-180 - maxNegX);
-            if (unwrappedWidth <= wrappedWidth) {
-                return new Rectangle(minNegX, maxPosX, maxY, minY);
-            } else {
-                return new Rectangle(minPosX, maxNegX, maxY, minY);
-            }
         } else {
-            return new Rectangle(minNegX, maxPosX, maxY, minY);
+            return switch (wrapLongitude) {
+                case NO_WRAP -> new Rectangle(minNegX, maxPosX, maxY, minY);
+                case WRAP -> maybeWrap(minNegX, minPosX, maxNegX, maxPosX, maxY, minY);
+            };
         }
     }
+
+    private static Rectangle maybeWrap(double minNegX, double minPosX, double maxNegX, double maxPosX, double maxY, double minY) {
+        double unwrappedWidth = maxPosX - minNegX;
+        double wrappedWidth = 360 + maxNegX - minPosX;
+        return unwrappedWidth <= wrappedWidth
+            ?
new Rectangle(minNegX, maxPosX, maxY, minY) + : new Rectangle(minPosX, maxNegX, maxY, minY); + } } private boolean isValid() { diff --git a/libs/geo/src/test/java/org/elasticsearch/geometry/utils/SpatialEnvelopeVisitorTests.java b/libs/geo/src/test/java/org/elasticsearch/geometry/utils/SpatialEnvelopeVisitorTests.java index fc35df295e566..893a1700616a6 100644 --- a/libs/geo/src/test/java/org/elasticsearch/geometry/utils/SpatialEnvelopeVisitorTests.java +++ b/libs/geo/src/test/java/org/elasticsearch/geometry/utils/SpatialEnvelopeVisitorTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.geometry.Point; import org.elasticsearch.geometry.Rectangle; +import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor.WrapLongitude; import org.elasticsearch.test.ESTestCase; import static org.hamcrest.Matchers.equalTo; @@ -36,7 +37,7 @@ public void testVisitCartesianShape() { public void testVisitGeoShapeNoWrap() { for (int i = 0; i < 1000; i++) { var geometry = GeometryTestUtils.randomGeometryWithoutCircle(0, false); - var bbox = SpatialEnvelopeVisitor.visitGeo(geometry, false); + var bbox = SpatialEnvelopeVisitor.visitGeo(geometry, WrapLongitude.NO_WRAP); assertNotNull(bbox); assertTrue(i + ": " + geometry, bbox.isPresent()); var result = bbox.get(); @@ -48,7 +49,8 @@ public void testVisitGeoShapeNoWrap() { public void testVisitGeoShapeWrap() { for (int i = 0; i < 1000; i++) { var geometry = GeometryTestUtils.randomGeometryWithoutCircle(0, true); - var bbox = SpatialEnvelopeVisitor.visitGeo(geometry, false); + // TODO this should be WRAP instead + var bbox = SpatialEnvelopeVisitor.visitGeo(geometry, WrapLongitude.NO_WRAP); assertNotNull(bbox); assertTrue(i + ": " + geometry, bbox.isPresent()); var result = bbox.get(); @@ -81,7 +83,7 @@ public void testVisitCartesianPoints() { } public void testVisitGeoPointsNoWrapping() { - var visitor = new SpatialEnvelopeVisitor(new SpatialEnvelopeVisitor.GeoPointVisitor(false)); + var visitor = new SpatialEnvelopeVisitor(new SpatialEnvelopeVisitor.GeoPointVisitor(WrapLongitude.NO_WRAP)); double minY = Double.MAX_VALUE; double maxY = -Double.MAX_VALUE; double minX = Double.MAX_VALUE; @@ -103,7 +105,7 @@ public void testVisitGeoPointsNoWrapping() { } public void testVisitGeoPointsWrapping() { - var visitor = new SpatialEnvelopeVisitor(new SpatialEnvelopeVisitor.GeoPointVisitor(true)); + var visitor = new SpatialEnvelopeVisitor(new SpatialEnvelopeVisitor.GeoPointVisitor(WrapLongitude.WRAP)); double minY = Double.POSITIVE_INFINITY; double maxY = Double.NEGATIVE_INFINITY; double minNegX = Double.POSITIVE_INFINITY; @@ -145,7 +147,7 @@ public void testVisitGeoPointsWrapping() { } public void testWillCrossDateline() { - var visitor = new SpatialEnvelopeVisitor(new SpatialEnvelopeVisitor.GeoPointVisitor(true)); + var visitor = new SpatialEnvelopeVisitor(new SpatialEnvelopeVisitor.GeoPointVisitor(WrapLongitude.WRAP)); visitor.visit(new Point(-90.0, 0.0)); visitor.visit(new Point(90.0, 0.0)); assertCrossesDateline(visitor, false); diff --git a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java index bc1f07fda6240..f3101890d8185 100644 --- a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java +++ 
b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.repositories.RepositoriesService; @@ -41,6 +42,7 @@ import org.elasticsearch.telemetry.Measurement; import org.elasticsearch.telemetry.TestTelemetryPlugin; import org.elasticsearch.test.BackgroundIndexer; +import org.elasticsearch.threadpool.ThreadPool; import java.io.ByteArrayInputStream; import java.io.IOException; @@ -53,6 +55,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Predicate; import java.util.regex.Pattern; import java.util.stream.Collectors; @@ -75,6 +78,8 @@ public class AzureBlobStoreRepositoryTests extends ESMockAPIBasedRepositoryInteg protected static final String DEFAULT_ACCOUNT_NAME = "account"; protected static final Predicate LIST_PATTERN = Pattern.compile("GET /[a-zA-Z0-9]+/[a-zA-Z0-9]+\\?.+").asMatchPredicate(); protected static final Predicate GET_BLOB_PATTERN = Pattern.compile("GET /[a-zA-Z0-9]+/[a-zA-Z0-9]+/.+").asMatchPredicate(); + private static final AtomicInteger MAX_CONNECTION_SETTING = new AtomicInteger(-1); + private static final AtomicInteger EVENT_LOOP_THREAD_COUNT_SETTING = new AtomicInteger(-1); @Override protected String repositoryType() { @@ -132,9 +137,17 @@ protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { // see com.azure.storage.blob.BlobUrlParts.parseIpUrl final String endpoint = "ignored;DefaultEndpointsProtocol=http;BlobEndpoint=" + httpServerUrl() + "/" + accountName; + + // The first node configured sets these for all nodes + MAX_CONNECTION_SETTING.compareAndSet(-1, randomIntBetween(10, 30)); + EVENT_LOOP_THREAD_COUNT_SETTING.compareAndSet(-1, randomIntBetween(1, 3)); return Settings.builder() .put(super.nodeSettings(nodeOrdinal, otherSettings)) .put(AzureStorageSettings.ENDPOINT_SUFFIX_SETTING.getConcreteSettingForNamespace("test").getKey(), endpoint) + .put(AzureClientProvider.EVENT_LOOP_THREAD_COUNT.getKey(), EVENT_LOOP_THREAD_COUNT_SETTING.get()) + .put(AzureClientProvider.MAX_OPEN_CONNECTIONS.getKey(), MAX_CONNECTION_SETTING.get()) + .put(AzureClientProvider.MAX_IDLE_TIME.getKey(), TimeValue.timeValueSeconds(randomIntBetween(10, 30))) + .put(AzureClientProvider.OPEN_CONNECTION_TIMEOUT.getKey(), TimeValue.timeValueSeconds(randomIntBetween(10, 30))) .setSecureSettings(secureSettings) .build(); } @@ -262,6 +275,16 @@ private boolean isPutBlockList(String request) { } } + public void testSettingsTakeEffect() { + AzureClientProvider azureClientProvider = internalCluster().getInstance(AzureClientProvider.class); + assertEquals(MAX_CONNECTION_SETTING.get(), azureClientProvider.getConnectionProvider().maxConnections()); + ThreadPool nodeThreadPool = internalCluster().getInstance(ThreadPool.class); + assertEquals( + EVENT_LOOP_THREAD_COUNT_SETTING.get(), + nodeThreadPool.info(AzureRepositoryPlugin.NETTY_EVENT_LOOP_THREAD_POOL_NAME).getMax() + ); + } + public void testLargeBlobCountDeletion() throws Exception { int numberOfBlobs = randomIntBetween(257, 2000); try (BlobStore store = newBlobStore()) { diff --git 
a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureClientProvider.java b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureClientProvider.java index f92bbcbdd716d..a9ae9db19a613 100644 --- a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureClientProvider.java +++ b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureClientProvider.java @@ -265,6 +265,11 @@ protected void doStop() { @Override protected void doClose() {} + // visible for testing + ConnectionProvider getConnectionProvider() { + return connectionProvider; + } + static class RequestMetrics { private volatile long totalRequestTimeNanos = 0; private volatile int requestCount; diff --git a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepositoryPlugin.java b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepositoryPlugin.java index 4556e63378fea..3b945c8118804 100644 --- a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepositoryPlugin.java +++ b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepositoryPlugin.java @@ -97,6 +97,10 @@ AzureStorageService createAzureStorageService(Settings settingsToUse, AzureClien @Override public List> getSettings() { return Arrays.asList( + AzureClientProvider.EVENT_LOOP_THREAD_COUNT, + AzureClientProvider.MAX_OPEN_CONNECTIONS, + AzureClientProvider.OPEN_CONNECTION_TIMEOUT, + AzureClientProvider.MAX_IDLE_TIME, AzureStorageSettings.ACCOUNT_SETTING, AzureStorageSettings.KEY_SETTING, AzureStorageSettings.SAS_TOKEN_SETTING, diff --git a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ImdsV1CredentialsRestIT.java b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ImdsV1CredentialsRestIT.java index dcdf52e963eef..bc41b9fd62ca9 100644 --- a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ImdsV1CredentialsRestIT.java +++ b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ImdsV1CredentialsRestIT.java @@ -44,7 +44,7 @@ public class RepositoryS3ImdsV1CredentialsRestIT extends AbstractRepositoryS3Res public static ElasticsearchCluster cluster = ElasticsearchCluster.local() .module("repository-s3") .setting("s3.client." + CLIENT + ".endpoint", s3Fixture::getAddress) - .systemProperty("com.amazonaws.sdk.ec2MetadataServiceEndpointOverride", ec2ImdsHttpFixture::getAddress) + .systemProperty(Ec2ImdsHttpFixture.ENDPOINT_OVERRIDE_SYSPROP_NAME, ec2ImdsHttpFixture::getAddress) .build(); @ClassRule diff --git a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ImdsV2CredentialsRestIT.java b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ImdsV2CredentialsRestIT.java index 434fc9720fc29..34500ff5227f1 100644 --- a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ImdsV2CredentialsRestIT.java +++ b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ImdsV2CredentialsRestIT.java @@ -44,7 +44,7 @@ public class RepositoryS3ImdsV2CredentialsRestIT extends AbstractRepositoryS3Res public static ElasticsearchCluster cluster = ElasticsearchCluster.local() .module("repository-s3") .setting("s3.client." 
+ CLIENT + ".endpoint", s3Fixture::getAddress) - .systemProperty("com.amazonaws.sdk.ec2MetadataServiceEndpointOverride", ec2ImdsHttpFixture::getAddress) + .systemProperty(Ec2ImdsHttpFixture.ENDPOINT_OVERRIDE_SYSPROP_NAME, ec2ImdsHttpFixture::getAddress) .build(); @ClassRule diff --git a/muted-tests.yml b/muted-tests.yml index b750c0777ce34..36dfc306b0147 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -174,7 +174,7 @@ tests: - class: "org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT" method: "test {scoring.*}" issue: https://github.com/elastic/elasticsearch/issues/117641 -- class: "org.elasticsearch.xpack.esql.qa.mixed.MultilusterEsqlSpecIT" +- class: "org.elasticsearch.xpack.esql.qa.mixed.MultiClusterEsqlSpecIT" method: "test {scoring.*}" issue: https://github.com/elastic/elasticsearch/issues/118460 - class: "org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT" @@ -314,6 +314,12 @@ tests: - class: org.elasticsearch.packaging.test.DockerTests method: test011SecurityEnabledStatus issue: https://github.com/elastic/elasticsearch/issues/118517 +- class: org.elasticsearch.reservedstate.service.FileSettingsServiceTests + method: testInvalidJSON + issue: https://github.com/elastic/elasticsearch/issues/116521 +- class: org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizerTests + method: testPlanSanityCheckWithBinaryPlans + issue: https://github.com/elastic/elasticsearch/issues/118656 # Examples: # diff --git a/plugins/discovery-ec2/build.gradle b/plugins/discovery-ec2/build.gradle index d4b56015edaa8..95715217fa59a 100644 --- a/plugins/discovery-ec2/build.gradle +++ b/plugins/discovery-ec2/build.gradle @@ -7,6 +7,7 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ apply plugin: 'elasticsearch.internal-java-rest-test' +apply plugin: 'elasticsearch.internal-cluster-test' esplugin { description 'The EC2 discovery plugin allows to use AWS API for the unicast discovery mechanism.' @@ -29,6 +30,8 @@ dependencies { javaRestTestImplementation project(':plugins:discovery-ec2') javaRestTestImplementation project(':test:fixtures:ec2-imds-fixture') + + internalClusterTestImplementation project(':test:fixtures:ec2-imds-fixture') } tasks.named("dependencyLicenses").configure { @@ -82,7 +85,7 @@ tasks.register("writeTestJavaPolicy") { } } -tasks.named("test").configure { +tasks.withType(Test).configureEach { dependsOn "writeTestJavaPolicy" // this is needed for insecure plugins, remove if possible! systemProperty 'tests.artifact', project.name diff --git a/plugins/discovery-ec2/src/internalClusterTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2NetworkAddressesTestCase.java b/plugins/discovery-ec2/src/internalClusterTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2NetworkAddressesTestCase.java new file mode 100644 index 0000000000000..b6a4845977b09 --- /dev/null +++ b/plugins/discovery-ec2/src/internalClusterTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2NetworkAddressesTestCase.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
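Several of the fixture-based tests above swap the copy-pasted `com.amazonaws.sdk.ec2MetadataServiceEndpointOverride` literal for a shared constant. A hedged sketch of what that constant presumably looks like in the ec2-imds-fixture module (the field name comes from this diff; the value is the AWS SDK v1 system property the old literal spelled out):

```java
// Hypothetical sketch; the real declaration lives in test/fixtures/ec2-imds-fixture.
public class Ec2ImdsHttpFixture {
    // Same literal the call sites used before this change, now defined once.
    public static final String ENDPOINT_OVERRIDE_SYSPROP_NAME = "com.amazonaws.sdk.ec2MetadataServiceEndpointOverride";
}
```

Centralizing the property name means a rename in the AWS SDK only has to be tracked in one place rather than in every REST test.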
+ */
+
+package org.elasticsearch.discovery.ec2;
+
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.CollectionUtils;
+import org.elasticsearch.http.HttpServerTransport;
+import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.test.ESIntegTestCase;
+
+import java.io.IOException;
+import java.util.Collection;
+
+@ESIntegTestCase.ClusterScope(numDataNodes = 0)
+public abstract class DiscoveryEc2NetworkAddressesTestCase extends ESIntegTestCase {
+
+    @Override
+    protected Collection<Class<? extends Plugin>> nodePlugins() {
+        return CollectionUtils.appendToCopyNoNullElements(super.nodePlugins(), Ec2DiscoveryPlugin.class);
+    }
+
+    @Override
+    protected boolean addMockHttpTransport() {
+        return false;
+    }
+
+    void verifyPublishAddress(String publishAddressSetting, String expectedAddress) throws IOException {
+        final var node = internalCluster().startNode(Settings.builder().put("http.publish_host", publishAddressSetting));
+        assertEquals(
+            expectedAddress,
+            internalCluster().getInstance(HttpServerTransport.class, node).boundAddress().publishAddress().getAddress()
+        );
+        internalCluster().stopNode(node);
+    }
+}
diff --git a/plugins/discovery-ec2/src/internalClusterTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2RegularNetworkAddressesIT.java b/plugins/discovery-ec2/src/internalClusterTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2RegularNetworkAddressesIT.java
new file mode 100644
index 0000000000000..491fa37a4c87d
--- /dev/null
+++ b/plugins/discovery-ec2/src/internalClusterTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2RegularNetworkAddressesIT.java
@@ -0,0 +1,50 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */ + +package org.elasticsearch.discovery.ec2; + +import fixture.aws.imds.Ec2ImdsHttpFixture; +import fixture.aws.imds.Ec2ImdsServiceBuilder; +import fixture.aws.imds.Ec2ImdsVersion; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.transport.BindTransportException; + +import java.io.IOException; +import java.util.concurrent.ExecutionException; + +import static org.hamcrest.Matchers.containsString; + +public class DiscoveryEc2RegularNetworkAddressesIT extends DiscoveryEc2NetworkAddressesTestCase { + public void testLocalIgnoresImds() { + Ec2ImdsHttpFixture.runWithFixture(new Ec2ImdsServiceBuilder(randomFrom(Ec2ImdsVersion.values())), imdsFixture -> { + try (var ignored = Ec2ImdsHttpFixture.withEc2MetadataServiceEndpointOverride(imdsFixture.getAddress())) { + verifyPublishAddress("_local_", "127.0.0.1"); + } + }); + } + + public void testImdsNotAvailable() throws IOException { + try (var ignored = Ec2ImdsHttpFixture.withEc2MetadataServiceEndpointOverride("http://127.0.0.1")) { + // if IMDS is not running, regular values like `_local_` should still work + verifyPublishAddress("_local_", "127.0.0.1"); + + // but EC2 addresses will cause the node to fail to start + final var assertionError = expectThrows( + AssertionError.class, + () -> internalCluster().startNode(Settings.builder().put("http.publish_host", "_ec2_")) + ); + final var executionException = asInstanceOf(ExecutionException.class, assertionError.getCause()); + final var bindTransportException = asInstanceOf(BindTransportException.class, executionException.getCause()); + assertEquals("Failed to resolve publish address", bindTransportException.getMessage()); + final var ioException = asInstanceOf(IOException.class, bindTransportException.getCause()); + assertThat(ioException.getMessage(), containsString("/latest/meta-data/local-ipv4")); + } + } +} diff --git a/plugins/discovery-ec2/src/internalClusterTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2SpecialNetworkAddressesIT.java b/plugins/discovery-ec2/src/internalClusterTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2SpecialNetworkAddressesIT.java new file mode 100644 index 0000000000000..f541c4cdd979b --- /dev/null +++ b/plugins/discovery-ec2/src/internalClusterTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2SpecialNetworkAddressesIT.java @@ -0,0 +1,77 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
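The next test class pairs each `_ec2:*_` publish-address keyword with the IMDS metadata path that backs it, across both IMDS protocol versions, using randomizedtesting's @ParametersFactory. A minimal, self-contained sketch of that parameterization pattern; the class, inputs, and assertion are illustrative and unrelated to EC2:

```java
import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;

import org.elasticsearch.test.ESTestCase;

import java.util.List;

// Each Object[] returned by parameters() is matched positionally to the @Name constructor arguments.
public class StringLengthExampleTests extends ESTestCase {

    private final String input;
    private final int expectedLength;

    public StringLengthExampleTests(@Name("input") String input, @Name("expectedLength") int expectedLength) {
        this.input = input;
        this.expectedLength = expectedLength;
    }

    @ParametersFactory
    public static Iterable<Object[]> parameters() {
        return List.of(new Object[] { "a", 1 }, new Object[] { "ab", 2 });
    }

    public void testLength() {
        assertEquals(expectedLength, input.length());
    }
}
```

The real test below builds its parameter list by crossing a map of address keywords with every Ec2ImdsVersion value, so each combination runs as its own named test case.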
+ */
+
+package org.elasticsearch.discovery.ec2;
+
+import fixture.aws.imds.Ec2ImdsHttpFixture;
+import fixture.aws.imds.Ec2ImdsServiceBuilder;
+import fixture.aws.imds.Ec2ImdsVersion;
+
+import com.carrotsearch.randomizedtesting.annotations.Name;
+import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
+
+import java.util.Map;
+import java.util.stream.Stream;
+
+public class DiscoveryEc2SpecialNetworkAddressesIT extends DiscoveryEc2NetworkAddressesTestCase {
+
+    private final String imdsAddressName;
+    private final String elasticsearchAddressName;
+    private final Ec2ImdsVersion imdsVersion;
+
+    public DiscoveryEc2SpecialNetworkAddressesIT(
+        @Name("imdsAddressName") String imdsAddressName,
+        @Name("elasticsearchAddressName") String elasticsearchAddressName,
+        @Name("imdsVersion") Ec2ImdsVersion imdsVersion
+    ) {
+        this.imdsAddressName = imdsAddressName;
+        this.elasticsearchAddressName = elasticsearchAddressName;
+        this.imdsVersion = imdsVersion;
+    }
+
+    @ParametersFactory
+    public static Iterable<Object[]> parameters() {
+        return Map.of(
+            "_ec2:privateIpv4_",
+            "local-ipv4",
+            "_ec2:privateDns_",
+            "local-hostname",
+            "_ec2:publicIpv4_",
+            "public-ipv4",
+            "_ec2:publicDns_",
+            "public-hostname",
+            "_ec2:publicIp_",
+            "public-ipv4",
+            "_ec2:privateIp_",
+            "local-ipv4",
+            "_ec2_",
+            "local-ipv4"
+        )
+            .entrySet()
+            .stream()
+            .flatMap(
+                addresses -> Stream.of(Ec2ImdsVersion.values())
+                    .map(ec2ImdsVersion -> new Object[] { addresses.getValue(), addresses.getKey(), ec2ImdsVersion })
+            )
+            .toList();
+    }
+
+    public void testSpecialNetworkAddresses() {
+        final var publishAddress = "10.0." + between(0, 255) + "." + between(0, 255);
+        Ec2ImdsHttpFixture.runWithFixture(
+            new Ec2ImdsServiceBuilder(imdsVersion).addInstanceAddress(imdsAddressName, publishAddress),
+            imdsFixture -> {
+                try (var ignored = Ec2ImdsHttpFixture.withEc2MetadataServiceEndpointOverride(imdsFixture.getAddress())) {
+                    verifyPublishAddress(elasticsearchAddressName, publishAddress);
+                }
+            }
+        );
+    }
+
+}
diff --git a/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2AvailabilityZoneAttributeNoImdsIT.java b/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2AvailabilityZoneAttributeNoImdsIT.java
index 602a98e17970d..73213090b6f93 100644
--- a/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2AvailabilityZoneAttributeNoImdsIT.java
+++ b/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2AvailabilityZoneAttributeNoImdsIT.java
@@ -9,6 +9,8 @@
 package org.elasticsearch.discovery.ec2;
 
+import com.amazonaws.util.EC2MetadataUtils;
+
 import org.elasticsearch.client.Request;
 import org.elasticsearch.test.cluster.ElasticsearchCluster;
 import org.elasticsearch.test.rest.ESRestTestCase;
@@ -29,6 +31,8 @@ protected String getTestRestCluster() {
     }
 
     public void testAvailabilityZoneAttribute() throws IOException {
+        assumeTrue("test only in non-AWS environment", EC2MetadataUtils.getInstanceId() == null);
+
         final var nodesInfoResponse = assertOKAndCreateObjectPath(client().performRequest(new Request("GET", "/_nodes/_all/_none")));
         for (final var nodeId : nodesInfoResponse.evaluateMapKeys("nodes")) {
             assertNull(nodesInfoResponse.evaluateExact("nodes", nodeId, "attributes", "aws_availability_zone"));
diff --git a/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2AvailabilityZoneAttributeTestCase.java
b/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2AvailabilityZoneAttributeTestCase.java index 7eb18eec5c0b9..178c5c3ad4cae 100644 --- a/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2AvailabilityZoneAttributeTestCase.java +++ b/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2AvailabilityZoneAttributeTestCase.java @@ -9,6 +9,8 @@ package org.elasticsearch.discovery.ec2; +import fixture.aws.imds.Ec2ImdsHttpFixture; + import org.elasticsearch.client.Request; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.test.cluster.ElasticsearchCluster; @@ -34,7 +36,7 @@ protected static ElasticsearchCluster buildCluster(Supplier imdsFixtureA return ElasticsearchCluster.local() .plugin("discovery-ec2") .setting(AwsEc2Service.AUTO_ATTRIBUTE_SETTING.getKey(), "true") - .systemProperty("com.amazonaws.sdk.ec2MetadataServiceEndpointOverride", imdsFixtureAddressSupplier) + .systemProperty(Ec2ImdsHttpFixture.ENDPOINT_OVERRIDE_SYSPROP_NAME, imdsFixtureAddressSupplier) .build(); } diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2NetworkTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2NetworkTests.java deleted file mode 100644 index 82787f53c9f76..0000000000000 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2NetworkTests.java +++ /dev/null @@ -1,181 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.discovery.ec2; - -import com.sun.net.httpserver.HttpServer; - -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.network.NetworkService; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.SuppressForbidden; -import org.elasticsearch.mocksocket.MockHttpServer; -import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.test.ESTestCase; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; - -import java.io.IOException; -import java.io.OutputStream; -import java.net.InetAddress; -import java.net.InetSocketAddress; -import java.security.AccessController; -import java.security.PrivilegedAction; -import java.util.Arrays; -import java.util.Collections; -import java.util.function.BiConsumer; - -import static com.amazonaws.SDKGlobalConfiguration.EC2_METADATA_SERVICE_OVERRIDE_SYSTEM_PROPERTY; -import static java.nio.charset.StandardCharsets.UTF_8; -import static org.hamcrest.Matchers.arrayContaining; -import static org.hamcrest.Matchers.equalTo; - -/** - * Test for EC2 network.host settings. - *
<p>
- * Warning: This test doesn't assert that the exceptions are thrown. - * They aren't. - */ -@SuppressForbidden(reason = "use http server") -public class Ec2NetworkTests extends ESTestCase { - - private static HttpServer httpServer; - - @BeforeClass - public static void startHttp() throws Exception { - httpServer = MockHttpServer.createHttp(new InetSocketAddress(InetAddress.getLoopbackAddress().getHostAddress(), 0), 0); - - BiConsumer registerContext = (path, v) -> { - final byte[] message = v.getBytes(UTF_8); - httpServer.createContext(path, (s) -> { - s.sendResponseHeaders(RestStatus.OK.getStatus(), message.length); - OutputStream responseBody = s.getResponseBody(); - responseBody.write(message); - responseBody.close(); - }); - }; - registerContext.accept("/latest/meta-data/local-ipv4", "127.0.0.1"); - registerContext.accept("/latest/meta-data/public-ipv4", "165.168.10.2"); - registerContext.accept("/latest/meta-data/public-hostname", "165.168.10.3"); - registerContext.accept("/latest/meta-data/local-hostname", "10.10.10.5"); - - httpServer.start(); - } - - @Before - public void setup() { - // redirect EC2 metadata service to httpServer - AccessController.doPrivileged( - (PrivilegedAction) () -> System.setProperty( - EC2_METADATA_SERVICE_OVERRIDE_SYSTEM_PROPERTY, - "http://" + httpServer.getAddress().getHostName() + ":" + httpServer.getAddress().getPort() - ) - ); - } - - @AfterClass - public static void stopHttp() { - httpServer.stop(0); - httpServer = null; - } - - /** - * Test for network.host: _ec2_ - */ - public void testNetworkHostEc2() throws IOException { - resolveEc2("_ec2_", InetAddress.getByName("127.0.0.1")); - } - - /** - * Test for network.host: _ec2_ - */ - public void testNetworkHostUnableToResolveEc2() { - // redirect EC2 metadata service to unknown location - AccessController.doPrivileged( - (PrivilegedAction) () -> System.setProperty(EC2_METADATA_SERVICE_OVERRIDE_SYSTEM_PROPERTY, "http://127.0.0.1/") - ); - - try { - resolveEc2("_ec2_", (InetAddress[]) null); - } catch (IOException e) { - assertThat( - e.getMessage(), - equalTo("IOException caught when fetching InetAddress from [http://127.0.0.1//latest/meta-data/local-ipv4]") - ); - } - } - - /** - * Test for network.host: _ec2:publicIp_ - */ - public void testNetworkHostEc2PublicIp() throws IOException { - resolveEc2("_ec2:publicIp_", InetAddress.getByName("165.168.10.2")); - } - - /** - * Test for network.host: _ec2:privateIp_ - */ - public void testNetworkHostEc2PrivateIp() throws IOException { - resolveEc2("_ec2:privateIp_", InetAddress.getByName("127.0.0.1")); - } - - /** - * Test for network.host: _ec2:privateIpv4_ - */ - public void testNetworkHostEc2PrivateIpv4() throws IOException { - resolveEc2("_ec2:privateIpv4_", InetAddress.getByName("127.0.0.1")); - } - - /** - * Test for network.host: _ec2:privateDns_ - */ - public void testNetworkHostEc2PrivateDns() throws IOException { - resolveEc2("_ec2:privateDns_", InetAddress.getByName("10.10.10.5")); - } - - /** - * Test for network.host: _ec2:publicIpv4_ - */ - public void testNetworkHostEc2PublicIpv4() throws IOException { - resolveEc2("_ec2:publicIpv4_", InetAddress.getByName("165.168.10.2")); - } - - /** - * Test for network.host: _ec2:publicDns_ - */ - public void testNetworkHostEc2PublicDns() throws IOException { - resolveEc2("_ec2:publicDns_", InetAddress.getByName("165.168.10.3")); - } - - private InetAddress[] resolveEc2(String host, InetAddress... 
expected) throws IOException { - Settings nodeSettings = Settings.builder().put("network.host", host).build(); - - NetworkService networkService = new NetworkService(Collections.singletonList(new Ec2NameResolver())); - - InetAddress[] addresses = networkService.resolveBindHostAddresses( - NetworkService.GLOBAL_NETWORK_BIND_HOST_SETTING.get(nodeSettings).toArray(Strings.EMPTY_ARRAY) - ); - if (expected == null) { - fail("We should get an IOException, resolved addressed:" + Arrays.toString(addresses)); - } - assertThat(addresses, arrayContaining(expected)); - return addresses; - } - - /** - * Test that we don't have any regression with network host core settings such as - * network.host: _local_ - */ - public void testNetworkHostCoreLocal() throws IOException { - NetworkService networkService = new NetworkService(Collections.singletonList(new Ec2NameResolver())); - InetAddress[] addresses = networkService.resolveBindHostAddresses(null); - assertThat(addresses, arrayContaining(networkService.resolveBindHostAddresses(new String[] { "_local_" }))); - } -} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/migrate.cancel_reindex.json b/rest-api-spec/src/main/resources/rest-api-spec/api/migrate.cancel_reindex.json new file mode 100644 index 0000000000000..a034f204edbfb --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/migrate.cancel_reindex.json @@ -0,0 +1,30 @@ +{ + "migrate.cancel_reindex":{ + "documentation":{ + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/data-stream-reindex.html", + "description":"This API returns the status of a migration reindex attempt for a data stream or index" + }, + "stability":"experimental", + "visibility":"private", + "headers":{ + "accept": [ "application/json"], + "content_type": ["application/json"] + }, + "url":{ + "paths":[ + { + "path":"/_migration/reindex/{index}/_cancel", + "methods":[ + "POST" + ], + "parts":{ + "index":{ + "type":"string", + "description":"The index or data stream name" + } + } + } + ] + } + } +} diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml index 335e0b4783bf0..8e8afafc9f069 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml @@ -84,32 +84,6 @@ - match: { error.type: "illegal_argument_exception" } - match: { error.reason: "Types cannot be provided in put mapping requests" } ---- -"Put mappings with explicit _doc type bwc": - - skip: - cluster_features: [ "gte_v8.0.0"] - reason: "old deprecation message for pre 8.0" - - requires: - test_runner_features: ["node_selector"] - - do: - indices.create: - index: test_index - - - do: - node_selector: - version: "original" - catch: bad_request - indices.put_mapping: - index: test_index - body: - _doc: - properties: - field: - type: keyword - - - match: { error.type: "illegal_argument_exception" } - - match: { error.reason: "Types cannot be provided in put mapping requests, unless the include_type_name parameter is set to true." 
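For orientation, here is a low-level REST client call against the endpoint the migrate.cancel_reindex spec above registers. The data stream name is illustrative, and the API is marked experimental with private visibility in the spec, so treat this as a sketch rather than supported usage:

```java
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

import java.io.IOException;

public class CancelMigrationReindexExample {

    // POST /_migration/reindex/{index}/_cancel with an illustrative target name.
    static Response cancelReindex(RestClient client) throws IOException {
        return client.performRequest(new Request("POST", "/_migration/reindex/my-data-stream/_cancel"));
    }
}
```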
} - --- "Update per-field metadata": diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index ff902dbede007..51896fb80a62c 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -479,5 +479,5 @@ exports org.elasticsearch.lucene.spatial; exports org.elasticsearch.inference.configuration; exports org.elasticsearch.monitor.metrics; - + exports org.elasticsearch.plugins.internal.rewriter to org.elasticsearch.inference; } diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index ac083862357d6..388123e86c882 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -137,6 +137,8 @@ static TransportVersion def(int id) { public static final TransportVersion RETRIES_AND_OPERATIONS_IN_BLOBSTORE_STATS = def(8_804_00_0); public static final TransportVersion ADD_DATA_STREAM_OPTIONS_TO_TEMPLATES = def(8_805_00_0); public static final TransportVersion KNN_QUERY_RESCORE_OVERSAMPLE = def(8_806_00_0); + public static final TransportVersion SEMANTIC_QUERY_LENIENT = def(8_807_00_0); + public static final TransportVersion ESQL_QUERY_BUILDER_IN_SEARCH_FUNCTIONS = def(8_808_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 24aa5bd261d7e..47c43eadcfb03 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -187,11 +187,12 @@ public class Version implements VersionId, ToXContentFragment { public static final Version V_8_15_2 = new Version(8_15_02_99); public static final Version V_8_15_3 = new Version(8_15_03_99); public static final Version V_8_15_4 = new Version(8_15_04_99); - public static final Version V_8_15_6 = new Version(8_15_06_99); + public static final Version V_8_15_5 = new Version(8_15_05_99); public static final Version V_8_16_0 = new Version(8_16_00_99); public static final Version V_8_16_1 = new Version(8_16_01_99); public static final Version V_8_16_2 = new Version(8_16_02_99); public static final Version V_8_17_0 = new Version(8_17_00_99); + public static final Version V_8_17_1 = new Version(8_17_01_99); public static final Version V_8_18_0 = new Version(8_18_00_99); public static final Version V_9_0_0 = new Version(9_00_00_99); public static final Version CURRENT = V_9_0_0; diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java index 952789e1bf746..c74275991c899 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java @@ -1116,10 +1116,10 @@ public IndexVersion getCreationVersion() { /** * Return the {@link IndexVersion} that this index provides compatibility for. - * This is typically compared to the {@link IndexVersions#MINIMUM_COMPATIBLE} to figure out whether the index can be handled - * by the cluster. - * By default, this is equal to the {@link #getCreationVersion()}, but can also be a newer version if the index has been imported as - * a legacy index from an older snapshot, and its metadata has been converted to be handled by newer version nodes. 
+ * This is typically compared to the {@link IndexVersions#MINIMUM_COMPATIBLE} or {@link IndexVersions#MINIMUM_READONLY_COMPATIBLE}
+ * to figure out whether the index can be handled by the cluster.
+ * By default, this is equal to the {@link #getCreationVersion()}, but can also be a newer version if the index has been created by
+ * a legacy version and imported as an archive, in which case its metadata has been converted to be handled by newer version nodes.
  */
 public IndexVersion getCompatibilityVersion() {
     return indexCompatibilityVersion;
diff --git a/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java b/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java
index f3a3fc9f771d4..afadb8f5b3011 100644
--- a/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java
+++ b/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java
@@ -524,8 +524,7 @@ static void checkForIndexCompatibility(Logger logger, DataPath... dataPaths) thr
         logger.info("oldest index version recorded in NodeMetadata {}", metadata.oldestIndexVersion());
 
-        if (metadata.oldestIndexVersion().isLegacyIndexVersion()) {
-
+        if (metadata.oldestIndexVersion().before(IndexVersions.MINIMUM_COMPATIBLE)) {
             String bestDowngradeVersion = getBestDowngradeVersion(metadata.previousNodeVersion().toString());
             throw new IllegalStateException(
                 "Cannot start this node because it holds metadata for indices with version ["
diff --git a/server/src/main/java/org/elasticsearch/index/IndexModule.java b/server/src/main/java/org/elasticsearch/index/IndexModule.java
index 64182b000827d..2168ad1df5d2f 100644
--- a/server/src/main/java/org/elasticsearch/index/IndexModule.java
+++ b/server/src/main/java/org/elasticsearch/index/IndexModule.java
@@ -58,6 +58,7 @@ import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
 import org.elasticsearch.indices.recovery.RecoveryState;
 import org.elasticsearch.plugins.IndexStorePlugin;
+import org.elasticsearch.plugins.internal.rewriter.QueryRewriteInterceptor;
 import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.search.aggregations.support.ValuesSourceRegistry;
 import org.elasticsearch.threadpool.ThreadPool;
@@ -478,7 +479,8 @@ public IndexService newIndexService(
         IdFieldMapper idFieldMapper,
         ValuesSourceRegistry valuesSourceRegistry,
         IndexStorePlugin.IndexFoldersDeletionListener indexFoldersDeletionListener,
-        Map<String, IndexStorePlugin.SnapshotCommitSupplier> snapshotCommitSuppliers
+        Map<String, IndexStorePlugin.SnapshotCommitSupplier> snapshotCommitSuppliers,
+        QueryRewriteInterceptor queryRewriteInterceptor
     ) throws IOException {
         final IndexEventListener eventListener = freeze();
         Function<IndexService, CheckedFunction<DirectoryReader, DirectoryReader, IOException>> readerWrapperFactory = indexReaderWrapper
@@ -540,7 +542,8 @@ public IndexService newIndexService(
             indexFoldersDeletionListener,
             snapshotCommitSupplier,
             indexCommitListener.get(),
-            mapperMetrics
+            mapperMetrics,
+            queryRewriteInterceptor
         );
         success = true;
         return indexService;
diff --git a/server/src/main/java/org/elasticsearch/index/IndexService.java b/server/src/main/java/org/elasticsearch/index/IndexService.java
index 571bbd76a49dd..a5b3991d89bc4 100644
--- a/server/src/main/java/org/elasticsearch/index/IndexService.java
+++ b/server/src/main/java/org/elasticsearch/index/IndexService.java
@@ -85,6 +85,7 @@ import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
 import org.elasticsearch.indices.recovery.RecoveryState;
 import org.elasticsearch.plugins.IndexStorePlugin;
+import org.elasticsearch.plugins.internal.rewriter.QueryRewriteInterceptor;
 import org.elasticsearch.script.ScriptService;
 import
org.elasticsearch.search.aggregations.support.ValuesSourceRegistry; import org.elasticsearch.threadpool.ThreadPool; @@ -162,6 +163,7 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust private final Supplier indexSortSupplier; private final ValuesSourceRegistry valuesSourceRegistry; private final MapperMetrics mapperMetrics; + private final QueryRewriteInterceptor queryRewriteInterceptor; @SuppressWarnings("this-escape") public IndexService( @@ -196,7 +198,8 @@ public IndexService( IndexStorePlugin.IndexFoldersDeletionListener indexFoldersDeletionListener, IndexStorePlugin.SnapshotCommitSupplier snapshotCommitSupplier, Engine.IndexCommitListener indexCommitListener, - MapperMetrics mapperMetrics + MapperMetrics mapperMetrics, + QueryRewriteInterceptor queryRewriteInterceptor ) { super(indexSettings); assert indexCreationContext != IndexCreationContext.RELOAD_ANALYZERS @@ -271,6 +274,7 @@ public IndexService( this.indexingOperationListeners = Collections.unmodifiableList(indexingOperationListeners); this.indexCommitListener = indexCommitListener; this.mapperMetrics = mapperMetrics; + this.queryRewriteInterceptor = queryRewriteInterceptor; try (var ignored = threadPool.getThreadContext().clearTraceContext()) { // kick off async ops for the first shard in this index this.refreshTask = new AsyncRefreshTask(this); @@ -802,6 +806,7 @@ public QueryRewriteContext newQueryRewriteContext( allowExpensiveQueries, scriptService, null, + null, null ); } diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersion.java b/server/src/main/java/org/elasticsearch/index/IndexVersion.java index cfb51cc3b5aef..22deee1325625 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersion.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersion.java @@ -123,8 +123,13 @@ public static IndexVersion current() { return CurrentHolder.CURRENT; } + /** + * Returns whether this index version is supported by this node version out-of-the-box. + * This is used to distinguish between ordinary indices and archive indices that may be + * imported into the cluster in read-only mode, and with limited functionality. 
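The javadoc above describes the split this change introduces between "fully supported" and "read-only/archive" index versions; the new MINIMUM_READONLY_COMPATIBLE floor follows below. A hedged sketch of the resulting decision, using only names visible in this diff (the helper class and methods are hypothetical):

```java
import org.elasticsearch.index.IndexVersion;
import org.elasticsearch.index.IndexVersions;

// Hypothetical helper illustrating the two compatibility floors after this change.
class IndexVersionCompatibility {

    // Ordinary indices must meet the regular minimum.
    static boolean isFullySupported(IndexVersion version) {
        return version.onOrAfter(IndexVersions.MINIMUM_COMPATIBLE);
    }

    // Archive/read-only imports reach further back, down to the read-only minimum (7.0 per this diff).
    static boolean isReadOnlyCompatible(IndexVersion version) {
        return version.onOrAfter(IndexVersions.MINIMUM_READONLY_COMPATIBLE)
            && version.before(IndexVersions.MINIMUM_COMPATIBLE);
    }
}
```

This matches the redefinition of isLegacyIndexVersion() below, which now tests against the read-only floor rather than the regular one.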
+ */ public boolean isLegacyIndexVersion() { - return before(IndexVersions.MINIMUM_COMPATIBLE); + return before(IndexVersions.MINIMUM_READONLY_COMPATIBLE); } public static IndexVersion getMinimumCompatibleIndexVersion(int versionId) { diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index 5589508507aec..0aaae2104576a 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -192,6 +192,7 @@ private static Version parseUnchecked(String version) { */ public static final IndexVersion MINIMUM_COMPATIBLE = V_8_0_0; + public static final IndexVersion MINIMUM_READONLY_COMPATIBLE = V_7_0_0; static final NavigableMap VERSION_IDS = getAllVersionIds(IndexVersions.class); static final IndexVersion LATEST_DEFINED; @@ -207,7 +208,7 @@ static NavigableMap getAllVersionIds(Class cls) { Map versionIdFields = new HashMap<>(); NavigableMap builder = new TreeMap<>(); - Set ignore = Set.of("ZERO", "MINIMUM_COMPATIBLE"); + Set ignore = Set.of("ZERO", "MINIMUM_COMPATIBLE", "MINIMUM_READONLY_COMPATIBLE"); for (Field declaredField : cls.getFields()) { if (declaredField.getType().equals(IndexVersion.class)) { diff --git a/server/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java index f00e6904feac7..05262798bac2a 100644 --- a/server/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java @@ -21,6 +21,7 @@ import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.SuggestingErrorOnUnknown; +import org.elasticsearch.plugins.internal.rewriter.QueryRewriteInterceptor; import org.elasticsearch.xcontent.AbstractObjectParser; import org.elasticsearch.xcontent.FilterXContentParser; import org.elasticsearch.xcontent.FilterXContentParserWrapper; @@ -278,6 +279,14 @@ protected static List readQueries(StreamInput in) throws IOExcepti @Override public final QueryBuilder rewrite(QueryRewriteContext queryRewriteContext) throws IOException { + QueryRewriteInterceptor queryRewriteInterceptor = queryRewriteContext.getQueryRewriteInterceptor(); + if (queryRewriteInterceptor != null) { + var rewritten = queryRewriteInterceptor.interceptAndRewrite(queryRewriteContext, this); + if (rewritten != this) { + return new InterceptedQueryBuilderWrapper(rewritten); + } + } + QueryBuilder rewritten = doRewrite(queryRewriteContext); if (rewritten == this) { return rewritten; diff --git a/server/src/main/java/org/elasticsearch/index/query/CoordinatorRewriteContext.java b/server/src/main/java/org/elasticsearch/index/query/CoordinatorRewriteContext.java index e054f17ef64d6..a84455ef09bf2 100644 --- a/server/src/main/java/org/elasticsearch/index/query/CoordinatorRewriteContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/CoordinatorRewriteContext.java @@ -104,6 +104,7 @@ public CoordinatorRewriteContext( null, null, null, + null, null ); this.dateFieldRangeInfo = dateFieldRangeInfo; diff --git a/server/src/main/java/org/elasticsearch/index/query/InnerHitContextBuilder.java b/server/src/main/java/org/elasticsearch/index/query/InnerHitContextBuilder.java index aacb4b4129c73..31bc7dddacb7f 100644 --- a/server/src/main/java/org/elasticsearch/index/query/InnerHitContextBuilder.java 
+++ b/server/src/main/java/org/elasticsearch/index/query/InnerHitContextBuilder.java @@ -66,6 +66,9 @@ public InnerHitBuilder innerHitBuilder() { public static void extractInnerHits(QueryBuilder query, Map innerHitBuilders) { if (query instanceof AbstractQueryBuilder) { ((AbstractQueryBuilder) query).extractInnerHitBuilders(innerHitBuilders); + } else if (query instanceof InterceptedQueryBuilderWrapper interceptedQuery) { + // Unwrap an intercepted query here + extractInnerHits(interceptedQuery.queryBuilder, innerHitBuilders); } else { throw new IllegalStateException( "provided query builder [" + query.getClass() + "] class should inherit from AbstractQueryBuilder, but it doesn't" diff --git a/server/src/main/java/org/elasticsearch/index/query/InterceptedQueryBuilderWrapper.java b/server/src/main/java/org/elasticsearch/index/query/InterceptedQueryBuilderWrapper.java new file mode 100644 index 0000000000000..b1030e4a76d97 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/query/InterceptedQueryBuilderWrapper.java @@ -0,0 +1,109 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.search.Query; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.plugins.internal.rewriter.QueryRewriteInterceptor; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Objects; + +/** + * Wrapper for instances of {@link QueryBuilder} that have been intercepted using the {@link QueryRewriteInterceptor} to + * break out of the rewrite phase. These instances are unwrapped on serialization. + */ +class InterceptedQueryBuilderWrapper implements QueryBuilder { + + protected final QueryBuilder queryBuilder; + + InterceptedQueryBuilderWrapper(QueryBuilder queryBuilder) { + super(); + this.queryBuilder = queryBuilder; + } + + @Override + public QueryBuilder rewrite(QueryRewriteContext queryRewriteContext) throws IOException { + QueryRewriteInterceptor queryRewriteInterceptor = queryRewriteContext.getQueryRewriteInterceptor(); + try { + queryRewriteContext.setQueryRewriteInterceptor(null); + QueryBuilder rewritten = queryBuilder.rewrite(queryRewriteContext); + return rewritten != queryBuilder ? 
new InterceptedQueryBuilderWrapper(rewritten) : this; + } finally { + queryRewriteContext.setQueryRewriteInterceptor(queryRewriteInterceptor); + } + } + + @Override + public String getWriteableName() { + return queryBuilder.getWriteableName(); + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return queryBuilder.getMinimalSupportedVersion(); + } + + @Override + public Query toQuery(SearchExecutionContext context) throws IOException { + return queryBuilder.toQuery(context); + } + + @Override + public QueryBuilder queryName(String queryName) { + queryBuilder.queryName(queryName); + return this; + } + + @Override + public String queryName() { + return queryBuilder.queryName(); + } + + @Override + public float boost() { + return queryBuilder.boost(); + } + + @Override + public QueryBuilder boost(float boost) { + queryBuilder.boost(boost); + return this; + } + + @Override + public String getName() { + return queryBuilder.getName(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + queryBuilder.writeTo(out); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return queryBuilder.toXContent(builder, params); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o instanceof InterceptedQueryBuilderWrapper == false) return false; + return Objects.equals(queryBuilder, ((InterceptedQueryBuilderWrapper) o).queryBuilder); + } + + @Override + public int hashCode() { + return Objects.hashCode(queryBuilder); + } +} diff --git a/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java b/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java index fce74aa60ab16..265a0c52593bd 100644 --- a/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java @@ -28,6 +28,7 @@ import org.elasticsearch.index.mapper.MappingLookup; import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.mapper.TextFieldMapper; +import org.elasticsearch.plugins.internal.rewriter.QueryRewriteInterceptor; import org.elasticsearch.script.ScriptCompiler; import org.elasticsearch.search.aggregations.support.ValuesSourceRegistry; import org.elasticsearch.search.builder.PointInTimeBuilder; @@ -70,6 +71,7 @@ public class QueryRewriteContext { protected Predicate allowedFields; private final ResolvedIndices resolvedIndices; private final PointInTimeBuilder pit; + private QueryRewriteInterceptor queryRewriteInterceptor; public QueryRewriteContext( final XContentParserConfiguration parserConfiguration, @@ -86,7 +88,8 @@ public QueryRewriteContext( final BooleanSupplier allowExpensiveQueries, final ScriptCompiler scriptService, final ResolvedIndices resolvedIndices, - final PointInTimeBuilder pit + final PointInTimeBuilder pit, + final QueryRewriteInterceptor queryRewriteInterceptor ) { this.parserConfiguration = parserConfiguration; @@ -105,6 +108,7 @@ public QueryRewriteContext( this.scriptService = scriptService; this.resolvedIndices = resolvedIndices; this.pit = pit; + this.queryRewriteInterceptor = queryRewriteInterceptor; } public QueryRewriteContext(final XContentParserConfiguration parserConfiguration, final Client client, final LongSupplier nowInMillis) { @@ -123,6 +127,7 @@ public QueryRewriteContext(final XContentParserConfiguration parserConfiguration null, null, null, + null, null ); } @@ -132,7 +137,8 @@ public 
QueryRewriteContext( final Client client, final LongSupplier nowInMillis, final ResolvedIndices resolvedIndices, - final PointInTimeBuilder pit + final PointInTimeBuilder pit, + final QueryRewriteInterceptor queryRewriteInterceptor ) { this( parserConfiguration, @@ -149,7 +155,8 @@ public QueryRewriteContext( null, null, resolvedIndices, - pit + pit, + queryRewriteInterceptor ); } @@ -428,4 +435,13 @@ public String getTierPreference() { // It was decided we should only test the first of these potentially multiple preferences. return value.split(",")[0].trim(); } + + public QueryRewriteInterceptor getQueryRewriteInterceptor() { + return queryRewriteInterceptor; + } + + public void setQueryRewriteInterceptor(QueryRewriteInterceptor queryRewriteInterceptor) { + this.queryRewriteInterceptor = queryRewriteInterceptor; + } + } diff --git a/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java b/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java index fbc3696d40221..b2ee6134a7728 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java @@ -271,6 +271,7 @@ private SearchExecutionContext( allowExpensiveQueries, scriptService, null, + null, null ); this.shardId = shardId; diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java b/server/src/main/java/org/elasticsearch/indices/IndicesService.java index 818bf2036e3b1..a5765a1a707d2 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -135,6 +135,7 @@ import org.elasticsearch.plugins.FieldPredicate; import org.elasticsearch.plugins.IndexStorePlugin; import org.elasticsearch.plugins.PluginsService; +import org.elasticsearch.plugins.internal.rewriter.QueryRewriteInterceptor; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.aggregations.support.ValuesSourceRegistry; @@ -262,6 +263,7 @@ public class IndicesService extends AbstractLifecycleComponent private final MapperMetrics mapperMetrics; private final PostRecoveryMerger postRecoveryMerger; private final List searchOperationListeners; + private final QueryRewriteInterceptor queryRewriteInterceptor; @Override protected void doStart() { @@ -330,6 +332,7 @@ public void onRemoval(ShardId shardId, String fieldName, boolean wasEvicted, lon this.indexFoldersDeletionListeners = new CompositeIndexFoldersDeletionListener(builder.indexFoldersDeletionListeners); this.snapshotCommitSuppliers = builder.snapshotCommitSuppliers; this.requestCacheKeyDifferentiator = builder.requestCacheKeyDifferentiator; + this.queryRewriteInterceptor = builder.queryRewriteInterceptor; this.mapperMetrics = builder.mapperMetrics; // doClose() is called when shutting down a node, yet there might still be ongoing requests // that we need to wait for before closing some resources such as the caches. 
In order to @@ -779,7 +782,8 @@ private synchronized IndexService createIndexService( idFieldMappers.apply(idxSettings.getMode()), valuesSourceRegistry, indexFoldersDeletionListeners, - snapshotCommitSuppliers + snapshotCommitSuppliers, + queryRewriteInterceptor ); } @@ -1766,7 +1770,7 @@ public AliasFilter buildAliasFilter(ClusterState state, String index, Set requestCacheKeyDifferentiator; MapperMetrics mapperMetrics; List searchOperationListener = List.of(); + QueryRewriteInterceptor queryRewriteInterceptor = null; public IndicesServiceBuilder settings(Settings settings) { this.settings = settings; @@ -239,6 +242,27 @@ public IndicesService build() { .flatMap(m -> m.entrySet().stream()) .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + var queryRewriteInterceptors = pluginsService.filterPlugins(SearchPlugin.class) + .map(SearchPlugin::getQueryRewriteInterceptors) + .flatMap(List::stream) + .collect(Collectors.toMap(QueryRewriteInterceptor::getQueryName, interceptor -> { + if (interceptor.getQueryName() == null) { + throw new IllegalArgumentException("QueryRewriteInterceptor [" + interceptor.getClass().getName() + "] requires name"); + } + return interceptor; + }, (a, b) -> { + throw new IllegalStateException( + "Conflicting rewrite interceptors [" + + a.getQueryName() + + "] found in [" + + a.getClass().getName() + + "] and [" + + b.getClass().getName() + + "]" + ); + })); + queryRewriteInterceptor = QueryRewriteInterceptor.multi(queryRewriteInterceptors); + return new IndicesService(this); } } diff --git a/server/src/main/java/org/elasticsearch/injection/guice/Binder.java b/server/src/main/java/org/elasticsearch/injection/guice/Binder.java index c34bebd10c2e1..d59edfce89183 100644 --- a/server/src/main/java/org/elasticsearch/injection/guice/Binder.java +++ b/server/src/main/java/org/elasticsearch/injection/guice/Binder.java @@ -65,9 +65,7 @@ * *

The {@link Provider} you use here does not have to be a "factory"; that * is, a provider which always creates each instance it provides. - * However, this is generally a good practice to follow. You can then use - * Guice's concept of {@link Scope scopes} to guide when creation should happen - * -- "letting Guice work for you". + * However, this is generally a good practice to follow. * *

  *     bind(Service.class).annotatedWith(Red.class).to(ServiceImpl.class);
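The injection cleanup in the hunks that follow removes the Scope abstraction outright: Scope.java is deleted, Scopes.SINGLETON and Scopes.NO_SCOPE disappear, and Scoping collapses from an abstract class into a two-value enum. A minimal, self-contained sketch of the resulting model (illustrative only; the constants mirror the Scoping.java hunk below):

// Illustrative sketch: with the Scope interface gone, scoping is a closed
// two-value enum, so call sites use plain identity checks and exhaustive
// switches instead of virtual dispatch through polymorphic Scope objects.
enum Scoping {
    UNSCOPED,        // default: a new instance is created for every injection
    EAGER_SINGLETON  // one instance per Injector, created eagerly at startup
}

final class ScopingDemo {
    static String describe(Scoping scoping) {
        return switch (scoping) { // exhaustive over the enum: no default branch needed
            case UNSCOPED -> "new instance per injection";
            case EAGER_SINGLETON -> "single instance, eagerly created";
        };
    }

    public static void main(String[] args) {
        System.out.println(describe(Scoping.EAGER_SINGLETON)); // prints: single instance, eagerly created
    }
}

This is what the InjectorBuilder and AbstractBindingBuilder hunks below reduce to: the old isEagerSingleton() check becomes scoping == Scoping.EAGER_SINGLETON, and isExplicitlyScoped() becomes scoping != Scoping.UNSCOPED.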
diff --git a/server/src/main/java/org/elasticsearch/injection/guice/BindingProcessor.java b/server/src/main/java/org/elasticsearch/injection/guice/BindingProcessor.java index 9223261ec2dd5..677f111c764a4 100644 --- a/server/src/main/java/org/elasticsearch/injection/guice/BindingProcessor.java +++ b/server/src/main/java/org/elasticsearch/injection/guice/BindingProcessor.java @@ -218,7 +218,6 @@ private void putBinding(BindingImpl binding) { MembersInjector.class, Module.class, Provider.class, - Scope.class, TypeLiteral.class ); // TODO(jessewilson): fix BuiltInModule, then add Stage diff --git a/server/src/main/java/org/elasticsearch/injection/guice/InjectorBuilder.java b/server/src/main/java/org/elasticsearch/injection/guice/InjectorBuilder.java index 99d42faf6a803..fe9ac309e23f4 100644 --- a/server/src/main/java/org/elasticsearch/injection/guice/InjectorBuilder.java +++ b/server/src/main/java/org/elasticsearch/injection/guice/InjectorBuilder.java @@ -20,6 +20,7 @@ import org.elasticsearch.injection.guice.internal.Errors; import org.elasticsearch.injection.guice.internal.ErrorsException; import org.elasticsearch.injection.guice.internal.InternalContext; +import org.elasticsearch.injection.guice.internal.Scoping; import org.elasticsearch.injection.guice.internal.Stopwatch; import org.elasticsearch.injection.guice.spi.Dependency; @@ -154,7 +155,7 @@ public static void loadEagerSingletons(InjectorImpl injector, Errors errors) { } private static void loadEagerSingletons(InjectorImpl injector, final Errors errors, BindingImpl binding) { - if (binding.getScoping().isEagerSingleton()) { + if (binding.getScoping() == Scoping.EAGER_SINGLETON) { try { injector.callInContext(new ContextualCallable() { final Dependency dependency = Dependency.get(binding.getKey()); diff --git a/server/src/main/java/org/elasticsearch/injection/guice/Provider.java b/server/src/main/java/org/elasticsearch/injection/guice/Provider.java index 692617239ea74..6de9d8ff9dc85 100644 --- a/server/src/main/java/org/elasticsearch/injection/guice/Provider.java +++ b/server/src/main/java/org/elasticsearch/injection/guice/Provider.java @@ -28,8 +28,6 @@ * instances, instances you wish to safely mutate and discard, instances which are out of scope * (e.g. using a {@code @RequestScoped} object from within a {@code @SessionScoped} object), or * instances that will be initialized lazily. - *
  • A custom {@link Scope} is implemented as a decorator of {@code Provider}, which decides - * when to delegate to the backing provider and when to provide the instance some other way. *
  • The {@link Injector} offers access to the {@code Provider} it uses to fulfill requests * for a given key, via the {@link Injector#getProvider} methods. * diff --git a/server/src/main/java/org/elasticsearch/injection/guice/Scope.java b/server/src/main/java/org/elasticsearch/injection/guice/Scope.java deleted file mode 100644 index 681fc17bc6353..0000000000000 --- a/server/src/main/java/org/elasticsearch/injection/guice/Scope.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright (C) 2006 Google Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.elasticsearch.injection.guice; - -/** - * A scope is a level of visibility that instances provided by Guice may have. - * By default, an instance created by the {@link Injector} has no scope, - * meaning it has no state from the framework's perspective -- the - * {@code Injector} creates it, injects it once into the class that required it, - * and then immediately forgets it. Associating a scope with a particular - * binding allows the created instance to be "remembered" and possibly used - * again for other injections. - *

    - * An example of a scope is {@link Scopes#SINGLETON}. - * - * @author crazybob@google.com (Bob Lee) - */ -public interface Scope { - - /** - * Scopes a provider. The returned provider returns objects from this scope. - * If an object does not exist in this scope, the provider can use the given - * unscoped provider to retrieve one. - *

    - * Scope implementations are strongly encouraged to override - * {@link Object#toString} in the returned provider and include the backing - * provider's {@code toString()} output. - * - * @param unscoped locates an instance when one doesn't already exist in this - * scope. - * @return a new provider which only delegates to the given unscoped provider - * when an instance of the requested object doesn't already exist in this - * scope - */ - Provider scope(Provider unscoped); - - /** - * A short but useful description of this scope. For comparison, the standard - * scopes that ship with guice use the descriptions - * {@code "Scopes.SINGLETON"}, {@code "ServletScopes.SESSION"} and - * {@code "ServletScopes.REQUEST"}. - */ - @Override - String toString(); -} diff --git a/server/src/main/java/org/elasticsearch/injection/guice/Scopes.java b/server/src/main/java/org/elasticsearch/injection/guice/Scopes.java index d5b61407b4975..5f05d0337654c 100644 --- a/server/src/main/java/org/elasticsearch/injection/guice/Scopes.java +++ b/server/src/main/java/org/elasticsearch/injection/guice/Scopes.java @@ -19,8 +19,6 @@ import org.elasticsearch.injection.guice.internal.InternalFactory; import org.elasticsearch.injection.guice.internal.Scoping; -import java.util.Locale; - /** * Built-in scope implementations. * @@ -31,29 +29,27 @@ public class Scopes { private Scopes() {} /** - * One instance per {@link Injector}. + * Scopes an internal factory. */ - public static final Scope SINGLETON = new Scope() { - @Override - public Provider scope(final Provider creator) { - return new Provider() { + static InternalFactory scope(InjectorImpl injector, InternalFactory creator, Scoping scoping) { + return switch (scoping) { + case UNSCOPED -> creator; + case EAGER_SINGLETON -> new InternalFactoryToProviderAdapter<>(Initializables.of(new Provider<>() { private volatile T instance; - // DCL on a volatile is safe as of Java 5, which we obviously require. @Override - @SuppressWarnings("DoubleCheckedLocking") public T get() { if (instance == null) { /* - * Use a pretty coarse lock. We don't want to run into deadlocks - * when two threads try to load circularly-dependent objects. - * Maybe one of these days we will identify independent graphs of - * objects and offer to load them in parallel. - */ - synchronized (InjectorImpl.class) { + * Use a pretty coarse lock. We don't want to run into deadlocks + * when two threads try to load circularly-dependent objects. + * Maybe one of these days we will identify independent graphs of + * objects and offer to load them in parallel. + */ + synchronized (injector) { if (instance == null) { - instance = creator.get(); + instance = new ProviderToInternalFactoryAdapter<>(injector, creator).get(); } } } @@ -62,54 +58,10 @@ public T get() { @Override public String toString() { - return String.format(Locale.ROOT, "%s[%s]", creator, SINGLETON); + return creator + "[SINGLETON]"; } - }; - } - - @Override - public String toString() { - return "Scopes.SINGLETON"; - } - }; - - /** - * No scope; the same as not applying any scope at all. Each time the - * Injector obtains an instance of an object with "no scope", it injects this - * instance then immediately forgets it. When the next request for the same - * binding arrives it will need to obtain the instance over again. - *

    - * This exists only in case a class has been annotated with a scope - * annotation and you need to override this to "no scope" in your binding. - * - * @since 2.0 - */ - public static final Scope NO_SCOPE = new Scope() { - @Override - public Provider scope(Provider unscoped) { - return unscoped; - } - - @Override - public String toString() { - return "Scopes.NO_SCOPE"; - } - }; - - /** - * Scopes an internal factory. - */ - static InternalFactory scope(InjectorImpl injector, InternalFactory creator, Scoping scoping) { - - if (scoping.isNoScope()) { - return creator; - } - - Scope scope = scoping.getScopeInstance(); - - // TODO: use diamond operator once JI-9019884 is fixed - Provider scoped = scope.scope(new ProviderToInternalFactoryAdapter(injector, creator)); - return new InternalFactoryToProviderAdapter<>(Initializables.of(scoped)); + })); + }; } } diff --git a/server/src/main/java/org/elasticsearch/injection/guice/internal/AbstractBindingBuilder.java b/server/src/main/java/org/elasticsearch/injection/guice/internal/AbstractBindingBuilder.java index 28053c5f1d557..ee54c8aa93520 100644 --- a/server/src/main/java/org/elasticsearch/injection/guice/internal/AbstractBindingBuilder.java +++ b/server/src/main/java/org/elasticsearch/injection/guice/internal/AbstractBindingBuilder.java @@ -77,7 +77,7 @@ protected void checkNotScoped() { return; } - if (binding.getScoping().isExplicitlyScoped()) { + if (binding.getScoping() != Scoping.UNSCOPED) { binder.addError(SCOPE_ALREADY_SET); } } diff --git a/server/src/main/java/org/elasticsearch/injection/guice/internal/Scoping.java b/server/src/main/java/org/elasticsearch/injection/guice/internal/Scoping.java index fcb03f34f4204..e1c04ea8e348f 100644 --- a/server/src/main/java/org/elasticsearch/injection/guice/internal/Scoping.java +++ b/server/src/main/java/org/elasticsearch/injection/guice/internal/Scoping.java @@ -16,8 +16,7 @@ package org.elasticsearch.injection.guice.internal; -import org.elasticsearch.injection.guice.Scope; -import org.elasticsearch.injection.guice.Scopes; +import org.elasticsearch.injection.guice.Injector; /** * References a scope, either directly (as a scope instance), or indirectly (as a scope annotation). @@ -25,69 +24,14 @@ * * @author jessewilson@google.com (Jesse Wilson) */ -public abstract class Scoping { - +public enum Scoping { /** * No scoping annotation has been applied. Note that this is different from {@code * in(Scopes.NO_SCOPE)}, where the 'NO_SCOPE' has been explicitly applied. */ - public static final Scoping UNSCOPED = new Scoping() { - - @Override - public Scope getScopeInstance() { - return Scopes.NO_SCOPE; - } - - @Override - public String toString() { - return Scopes.NO_SCOPE.toString(); - } - - }; - - public static final Scoping EAGER_SINGLETON = new Scoping() { - - @Override - public Scope getScopeInstance() { - return Scopes.SINGLETON; - } - - @Override - public String toString() { - return "eager singleton"; - } - - }; - + UNSCOPED, /** - * Returns true if this scope was explicitly applied. If no scope was explicitly applied then the - * scoping annotation will be used. + * One instance per {@link Injector}. */ - public boolean isExplicitlyScoped() { - return this != UNSCOPED; - } - - /** - * Returns true if this is the default scope. In this case a new instance will be provided for - * each injection. - */ - public boolean isNoScope() { - return getScopeInstance() == Scopes.NO_SCOPE; - } - - /** - * Returns true if this scope is a singleton that should be loaded eagerly in {@code stage}. 
- */ - public boolean isEagerSingleton() { - return this == EAGER_SINGLETON; - } - - /** - * Returns the scope instance, or {@code null} if that isn't known for this instance. - */ - public Scope getScopeInstance() { - return null; - } - - private Scoping() {} + EAGER_SINGLETON } diff --git a/server/src/main/java/org/elasticsearch/plugins/SearchPlugin.java b/server/src/main/java/org/elasticsearch/plugins/SearchPlugin.java index f5670ebd8a543..e87e9ee85b29c 100644 --- a/server/src/main/java/org/elasticsearch/plugins/SearchPlugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/SearchPlugin.java @@ -23,6 +23,7 @@ import org.elasticsearch.index.query.QueryParser; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; import org.elasticsearch.index.query.functionscore.ScoreFunctionParser; +import org.elasticsearch.plugins.internal.rewriter.QueryRewriteInterceptor; import org.elasticsearch.search.SearchExtBuilder; import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilder; @@ -128,6 +129,14 @@ default List> getQueries() { return emptyList(); } + /** + * @return Applicable {@link QueryRewriteInterceptor}s configured for this plugin. + * Note: This is internal to Elasticsearch's API and not extensible by external plugins. + */ + default List getQueryRewriteInterceptors() { + return emptyList(); + } + /** * The new {@link Aggregation}s added by this plugin. */ diff --git a/server/src/main/java/org/elasticsearch/plugins/internal/rewriter/QueryRewriteInterceptor.java b/server/src/main/java/org/elasticsearch/plugins/internal/rewriter/QueryRewriteInterceptor.java new file mode 100644 index 0000000000000..8f4fb2ce7491a --- /dev/null +++ b/server/src/main/java/org/elasticsearch/plugins/internal/rewriter/QueryRewriteInterceptor.java @@ -0,0 +1,75 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.plugins.internal.rewriter; + +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryRewriteContext; + +import java.util.Map; + +/** + * Enables modules and plugins to intercept and rewrite queries during the query rewrite phase on the coordinator node. + */ +public interface QueryRewriteInterceptor { + + /** + * Intercepts and returns a rewritten query if modifications are required; otherwise, + * returns the same provided {@link QueryBuilder} instance unchanged. + * + * @param context the {@link QueryRewriteContext} providing the context for the rewrite operation + * @param queryBuilder the original {@link QueryBuilder} to potentially rewrite + * @return the rewritten {@link QueryBuilder}, or the original instance if no rewrite was needed + */ + QueryBuilder interceptAndRewrite(QueryRewriteContext context, QueryBuilder queryBuilder); + + /** + * Name of the query to be intercepted and rewritten. + */ + String getQueryName(); + + static QueryRewriteInterceptor multi(Map interceptors) { + return interceptors.isEmpty() ? 
new NoOpQueryRewriteInterceptor() : new CompositeQueryRewriteInterceptor(interceptors); + } + + class CompositeQueryRewriteInterceptor implements QueryRewriteInterceptor { + final String NAME = "composite"; + private final Map interceptors; + + private CompositeQueryRewriteInterceptor(Map interceptors) { + this.interceptors = interceptors; + } + + @Override + public String getQueryName() { + return NAME; + } + + @Override + public QueryBuilder interceptAndRewrite(QueryRewriteContext context, QueryBuilder queryBuilder) { + QueryRewriteInterceptor interceptor = interceptors.get(queryBuilder.getName()); + if (interceptor != null) { + return interceptor.interceptAndRewrite(context, queryBuilder); + } + return queryBuilder; + } + } + + class NoOpQueryRewriteInterceptor implements QueryRewriteInterceptor { + @Override + public QueryBuilder interceptAndRewrite(QueryRewriteContext context, QueryBuilder queryBuilder) { + return queryBuilder; + } + + @Override + public String getQueryName() { + return null; + } + } +} diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoriesModule.java b/server/src/main/java/org/elasticsearch/repositories/RepositoriesModule.java index e767bf5e3723b..b236b1fa730f1 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoriesModule.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoriesModule.java @@ -102,7 +102,9 @@ public RepositoriesModule( } if (preRestoreChecks.isEmpty()) { preRestoreChecks.add((snapshot, version) -> { - if (version.isLegacyIndexVersion()) { + // pre-restore checks will be run against the version in which the snapshot was created as well as + // the version in which the restored index was created + if (version.before(IndexVersions.MINIMUM_COMPATIBLE)) { throw new SnapshotRestoreException( snapshot, "the snapshot was created with Elasticsearch version [" diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCreateIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCreateIndexAction.java index e30d2f8d5c733..ba6fed3ea35d4 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCreateIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCreateIndexAction.java @@ -78,10 +78,7 @@ static Map prepareMappings(Map source) { Map mappings = (Map) source.get("mappings"); if (MapperService.isMappingSourceTyped(MapperService.SINGLE_MAPPING_NAME, mappings)) { throw new IllegalArgumentException( - "The mapping definition cannot be nested under a type " - + "[" - + MapperService.SINGLE_MAPPING_NAME - + "] unless include_type_name is set to true." + "The mapping definition cannot be nested under a type [" + MapperService.SINGLE_MAPPING_NAME + "]." 
); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java index 5f648ca8e77e5..0c7772bc3a69c 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java @@ -9,15 +9,12 @@ package org.elasticsearch.rest.action.admin.indices; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest; import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetadata; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; @@ -35,13 +32,6 @@ public class RestGetFieldMappingAction extends BaseRestHandler { - private static final Logger logger = LogManager.getLogger(RestGetFieldMappingAction.class); - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(logger.getName()); - public static final String INCLUDE_TYPE_DEPRECATION_MESSAGE = "[types removal] Using include_type_name in get " - + "field mapping requests is deprecated. The parameter will be removed in the next major version."; - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in get field mapping request is deprecated. " - + "Use typeless api instead"; - @Override public List routes() { return List.of(new Route(GET, "/_mapping/field/{fields}"), new Route(GET, "/{index}/_mapping/field/{fields}")); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesAction.java index 9ca890eaff65b..9be3462e97e0c 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesAction.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -35,9 +34,6 @@ */ @ServerlessScope(Scope.PUBLIC) public class RestGetIndicesAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestGetIndicesAction.class); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using `include_type_name` in get indices requests" - + " is deprecated. 
The parameter will be removed in the next major version."; @Override public List routes() { diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java index 242bcd399413b..27620fa750ea9 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.core.TimeValue; import org.elasticsearch.http.HttpChannel; import org.elasticsearch.rest.BaseRestHandler; @@ -31,11 +30,6 @@ @ServerlessScope(Scope.PUBLIC) public class RestGetMappingAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestGetMappingAction.class); - public static final String INCLUDE_TYPE_DEPRECATION_MSG = "[types removal] Using include_type_name in get" - + " mapping requests is deprecated. The parameter will be removed in the next major version."; - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in get mapping request is deprecated. " - + "Use typeless api instead"; public RestGetMappingAction() {} diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandler.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandler.java index ee1710f39ce41..5fd4ec83c1a18 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandler.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandler.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.admin.indices.shrink.ResizeType; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; @@ -27,7 +26,6 @@ import static org.elasticsearch.rest.RestUtils.getMasterNodeTimeout; public abstract class RestResizeHandler extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestResizeHandler.class); RestResizeHandler() {} diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java index 776302296b1a2..39d7b1d5851db 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.cluster.metadata.DataStream; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -33,10 +32,6 @@ @ServerlessScope(Scope.PUBLIC) public class RestRolloverIndexAction extends 
BaseRestHandler { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestRolloverIndexAction.class); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using include_type_name in rollover " - + "index requests is deprecated. The parameter will be removed in the next major version."; - @Override public List routes() { return List.of(new Route(POST, "/{index}/_rollover"), new Route(POST, "/{index}/_rollover/{new_index}")); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FieldAndFormat.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FieldAndFormat.java index f623b3040f1c5..ef8769b688c64 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FieldAndFormat.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FieldAndFormat.java @@ -12,8 +12,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.core.Nullable; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; @@ -30,9 +28,6 @@ * display values of this field. */ public final class FieldAndFormat implements Writeable, ToXContentObject { - private static final String USE_DEFAULT_FORMAT = "use_field_mapping"; - private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(FetchDocValuesPhase.class); - public static final ParseField FIELD_FIELD = new ParseField("field"); public static final ParseField FORMAT_FIELD = new ParseField("format"); public static final ParseField INCLUDE_UNMAPPED_FIELD = new ParseField("include_unmapped"); @@ -48,28 +43,6 @@ public final class FieldAndFormat implements Writeable, ToXContentObject { PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), INCLUDE_UNMAPPED_FIELD); } - private static CheckedFunction ignoreUseFieldMappingStringParser() { - return (p) -> { - if (p.currentToken() == XContentParser.Token.VALUE_NULL) { - return null; - } else { - String text = p.text(); - if (text.equals(USE_DEFAULT_FORMAT)) { - DEPRECATION_LOGGER.compatibleCritical( - "explicit_default_format", - "[" - + USE_DEFAULT_FORMAT - + "] is a special format that was only used to " - + "ease the transition to 7.x. It has become the default and shouldn't be set explicitly anymore." - ); - return null; - } else { - return text; - } - } - }; - } - /** * Parse a {@link FieldAndFormat} from some {@link XContent}. 
*/ diff --git a/server/src/main/resources/org/elasticsearch/TransportVersions.csv b/server/src/main/resources/org/elasticsearch/TransportVersions.csv index faeb7fe848159..08db0822dfef5 100644 --- a/server/src/main/resources/org/elasticsearch/TransportVersions.csv +++ b/server/src/main/resources/org/elasticsearch/TransportVersions.csv @@ -135,3 +135,4 @@ 8.15.5,8702003 8.16.0,8772001 8.16.1,8772004 +8.17.0,8797002 diff --git a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv index 1fc8bd8648ad6..afe696f31d323 100644 --- a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv +++ b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv @@ -135,3 +135,4 @@ 8.15.5,8512000 8.16.0,8518000 8.16.1,8518000 +8.17.0,8521000 diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java index 7640c1e7af308..550f8e5843628 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java @@ -12,24 +12,16 @@ import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetadata; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xcontent.json.JsonXContent; import java.io.IOException; import java.util.Collections; import java.util.HashMap; import java.util.Map; -import static org.hamcrest.Matchers.hasKey; - public class GetFieldMappingsResponseTests extends AbstractWireSerializingTestCase { public void testManualSerialization() throws IOException { @@ -56,36 +48,6 @@ public void testNullFieldMappingToXContent() { assertEquals("{\"index\":{\"mappings\":{}}}", Strings.toString(response)); } - public void testToXContentIncludesType() throws Exception { - Map> mappings = new HashMap<>(); - FieldMappingMetadata fieldMappingMetadata = new FieldMappingMetadata("my field", new BytesArray("{}")); - mappings.put("index", Collections.singletonMap("field", fieldMappingMetadata)); - GetFieldMappingsResponse response = new GetFieldMappingsResponse(mappings); - ToXContent.Params params = new ToXContent.MapParams(Collections.singletonMap("include_type_name", "true")); - - // v8 does not have _doc, even when include_type_name is present - // (although this throws unconsumed parameter exception in RestGetFieldMappingsAction) - try (XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent, RestApiVersion.V_8)) { - response.toXContent(builder, params); - - try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { - @SuppressWarnings("unchecked") - Map> index = (Map>) parser.map().get("index"); - assertThat(index.get("mappings"), hasKey("field")); - } 
- } - - try (XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent, RestApiVersion.V_8)) { - response.toXContent(builder, ToXContent.EMPTY_PARAMS); - - try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { - @SuppressWarnings("unchecked") - Map> index = (Map>) parser.map().get("index"); - assertThat(index.get("mappings"), hasKey("field")); - } - } - } - @Override protected GetFieldMappingsResponse createTestInstance() { return new GetFieldMappingsResponse(randomMapping()); diff --git a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java index a94427cd0df6f..0707f1356516a 100644 --- a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java @@ -1744,7 +1744,9 @@ protected void doWriteTo(StreamOutput out) throws IOException { NodeClient client = new NodeClient(settings, threadPool); SearchService searchService = mock(SearchService.class); - when(searchService.getRewriteContext(any(), any(), any())).thenReturn(new QueryRewriteContext(null, null, null, null, null)); + when(searchService.getRewriteContext(any(), any(), any())).thenReturn( + new QueryRewriteContext(null, null, null, null, null, null) + ); ClusterService clusterService = new ClusterService( settings, new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetadataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetadataTests.java index 8036a964071d2..4abd0c4a9d469 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetadataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetadataTests.java @@ -690,7 +690,12 @@ public static Map randomInferenceFields() { } private static InferenceFieldMetadata randomInferenceFieldMetadata(String name) { - return new InferenceFieldMetadata(name, randomIdentifier(), randomSet(1, 5, ESTestCase::randomIdentifier).toArray(String[]::new)); + return new InferenceFieldMetadata( + name, + randomIdentifier(), + randomIdentifier(), + randomSet(1, 5, ESTestCase::randomIdentifier).toArray(String[]::new) + ); } private IndexMetadataStats randomIndexStats(int numberOfShards) { diff --git a/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java b/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java index 49a4d519c0ea4..c519d4834148d 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java @@ -86,6 +86,7 @@ import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; import org.elasticsearch.indices.recovery.RecoveryState; import org.elasticsearch.plugins.IndexStorePlugin; +import org.elasticsearch.plugins.internal.rewriter.MockQueryRewriteInterceptor; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.internal.ReaderContext; import org.elasticsearch.test.ClusterServiceUtils; @@ -223,7 +224,8 @@ private IndexService newIndexService(IndexModule module) throws IOException { module.indexSettings().getMode().idFieldMapperWithoutFieldData(), null, indexDeletionListener, - emptyMap() + emptyMap(), + new MockQueryRewriteInterceptor() ); } diff --git 
a/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupInferenceFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupInferenceFieldMapperTests.java index 809fb161fcbe5..b1470c1ee5b3b 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupInferenceFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupInferenceFieldMapperTests.java @@ -15,7 +15,6 @@ import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.Plugin; -import java.io.IOException; import java.util.Collection; import java.util.List; import java.util.Map; @@ -94,6 +93,7 @@ private static class TestInferenceFieldMapper extends FieldMapper implements Inf public static final TypeParser PARSER = new TypeParser((n, c) -> new Builder(n)); public static final String INFERENCE_ID = "test_inference_id"; + public static final String SEARCH_INFERENCE_ID = "test_search_inference_id"; public static final String CONTENT_TYPE = "test_inference_field"; TestInferenceFieldMapper(String simpleName) { @@ -102,7 +102,7 @@ private static class TestInferenceFieldMapper extends FieldMapper implements Inf @Override public InferenceFieldMetadata getMetadata(Set sourcePaths) { - return new InferenceFieldMetadata(fullPath(), INFERENCE_ID, sourcePaths.toArray(new String[0])); + return new InferenceFieldMetadata(fullPath(), INFERENCE_ID, SEARCH_INFERENCE_ID, sourcePaths.toArray(new String[0])); } @Override @@ -111,7 +111,7 @@ public Object getOriginalValue(Map sourceAsMap) { } @Override - protected void parseCreateField(DocumentParserContext context) throws IOException {} + protected void parseCreateField(DocumentParserContext context) {} @Override public Builder getMergeBuilder() { diff --git a/server/src/test/java/org/elasticsearch/index/query/InterceptedQueryBuilderWrapperTests.java b/server/src/test/java/org/elasticsearch/index/query/InterceptedQueryBuilderWrapperTests.java new file mode 100644 index 0000000000000..6c570e0e71725 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/query/InterceptedQueryBuilderWrapperTests.java @@ -0,0 +1,92 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.index.query; + +import org.elasticsearch.plugins.internal.rewriter.QueryRewriteInterceptor; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.client.NoOpClient; +import org.elasticsearch.threadpool.TestThreadPool; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; + +public class InterceptedQueryBuilderWrapperTests extends ESTestCase { + + private TestThreadPool threadPool; + private NoOpClient client; + + @Before + public void setup() { + threadPool = createThreadPool(); + client = new NoOpClient(threadPool); + } + + @After + public void cleanup() { + threadPool.close(); + } + + public void testQueryNameReturnsWrappedQueryBuilder() { + MatchAllQueryBuilder matchAllQueryBuilder = new MatchAllQueryBuilder(); + InterceptedQueryBuilderWrapper interceptedQueryBuilderWrapper = new InterceptedQueryBuilderWrapper(matchAllQueryBuilder); + String queryName = randomAlphaOfLengthBetween(5, 10); + QueryBuilder namedQuery = interceptedQueryBuilderWrapper.queryName(queryName); + assertTrue(namedQuery instanceof InterceptedQueryBuilderWrapper); + assertEquals(queryName, namedQuery.queryName()); + } + + public void testQueryBoostReturnsWrappedQueryBuilder() { + MatchAllQueryBuilder matchAllQueryBuilder = new MatchAllQueryBuilder(); + InterceptedQueryBuilderWrapper interceptedQueryBuilderWrapper = new InterceptedQueryBuilderWrapper(matchAllQueryBuilder); + float boost = randomFloat(); + QueryBuilder boostedQuery = interceptedQueryBuilderWrapper.boost(boost); + assertTrue(boostedQuery instanceof InterceptedQueryBuilderWrapper); + assertEquals(boost, boostedQuery.boost(), 0.0001f); + } + + public void testRewrite() throws IOException { + QueryRewriteContext context = new QueryRewriteContext(null, client, null); + context.setQueryRewriteInterceptor(myMatchInterceptor); + + // Queries that are not intercepted behave normally + TermQueryBuilder termQueryBuilder = new TermQueryBuilder("field", "value"); + QueryBuilder rewritten = termQueryBuilder.rewrite(context); + assertTrue(rewritten instanceof TermQueryBuilder); + + // Queries that should be intercepted are rewritten and wrapped + MatchQueryBuilder matchQueryBuilder = new MatchQueryBuilder("field", "value"); + rewritten = matchQueryBuilder.rewrite(context); + assertTrue(rewritten instanceof InterceptedQueryBuilderWrapper); + assertTrue(((InterceptedQueryBuilderWrapper) rewritten).queryBuilder instanceof MatchQueryBuilder); + MatchQueryBuilder rewrittenMatchQueryBuilder = (MatchQueryBuilder) ((InterceptedQueryBuilderWrapper) rewritten).queryBuilder; + assertEquals("intercepted", rewrittenMatchQueryBuilder.value()); + + // An additional rewrite on an already intercepted query returns the same query + QueryBuilder rewrittenAgain = rewritten.rewrite(context); + assertTrue(rewrittenAgain instanceof InterceptedQueryBuilderWrapper); + assertEquals(rewritten, rewrittenAgain); + } + + private final QueryRewriteInterceptor myMatchInterceptor = new QueryRewriteInterceptor() { + @Override + public QueryBuilder interceptAndRewrite(QueryRewriteContext context, QueryBuilder queryBuilder) { + if (queryBuilder instanceof MatchQueryBuilder matchQueryBuilder) { + return new MatchQueryBuilder(matchQueryBuilder.fieldName(), "intercepted"); + } + return queryBuilder; + } + + @Override + public String getQueryName() { + return MatchQueryBuilder.NAME; + } + }; +} diff --git a/server/src/test/java/org/elasticsearch/index/query/QueryRewriteContextTests.java
b/server/src/test/java/org/elasticsearch/index/query/QueryRewriteContextTests.java index d07bcf54fdf09..5dd231ab97886 100644 --- a/server/src/test/java/org/elasticsearch/index/query/QueryRewriteContextTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/QueryRewriteContextTests.java @@ -52,6 +52,7 @@ public void testGetTierPreference() { null, null, null, + null, null ); @@ -79,6 +80,7 @@ public void testGetTierPreference() { null, null, null, + null, null ); diff --git a/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsHttpFixture.java b/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsHttpFixture.java index cc268a6021cb3..e232d10fdddbd 100644 --- a/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsHttpFixture.java +++ b/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsHttpFixture.java @@ -10,15 +10,24 @@ import com.sun.net.httpserver.HttpServer; +import org.elasticsearch.core.CheckedConsumer; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.SuppressForbidden; import org.junit.rules.ExternalResource; +import org.junit.runner.Description; +import org.junit.runners.model.Statement; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.UnknownHostException; +import java.security.AccessController; +import java.security.PrivilegedAction; import java.util.Objects; public class Ec2ImdsHttpFixture extends ExternalResource { + public static final String ENDPOINT_OVERRIDE_SYSPROP_NAME = "com.amazonaws.sdk.ec2MetadataServiceEndpointOverride"; + private final Ec2ImdsServiceBuilder ec2ImdsServiceBuilder; private HttpServer server; @@ -52,4 +61,32 @@ private static InetSocketAddress resolveAddress() { throw new RuntimeException(e); } } + + @SuppressForbidden(reason = "deliberately adjusting system property for endpoint override for use in internal-cluster tests") + public static Releasable withEc2MetadataServiceEndpointOverride(String endpointOverride) { + final PrivilegedAction resetProperty = System.getProperty(ENDPOINT_OVERRIDE_SYSPROP_NAME) instanceof String originalValue + ? 
() -> System.setProperty(ENDPOINT_OVERRIDE_SYSPROP_NAME, originalValue) + : () -> System.clearProperty(ENDPOINT_OVERRIDE_SYSPROP_NAME); + doPrivileged(() -> System.setProperty(ENDPOINT_OVERRIDE_SYSPROP_NAME, endpointOverride)); + return () -> doPrivileged(resetProperty); + } + + private static void doPrivileged(PrivilegedAction privilegedAction) { + AccessController.doPrivileged(privilegedAction); + } + + public static void runWithFixture(Ec2ImdsServiceBuilder ec2ImdsServiceBuilder, CheckedConsumer action) { + final var imdsFixture = new Ec2ImdsHttpFixture(ec2ImdsServiceBuilder); + try { + imdsFixture.apply(new Statement() { + @Override + public void evaluate() throws Exception { + action.accept(imdsFixture); + } + }, Description.EMPTY).evaluate(); + } catch (Throwable e) { + throw new AssertionError(e); + } + } + } diff --git a/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsHttpHandler.java b/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsHttpHandler.java index fd2044357257b..0c58205ac8d60 100644 --- a/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsHttpHandler.java +++ b/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsHttpHandler.java @@ -23,6 +23,7 @@ import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; import java.util.Collection; +import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.function.BiConsumer; @@ -43,6 +44,7 @@ public class Ec2ImdsHttpHandler implements HttpHandler { private final Set validImdsTokens = ConcurrentCollections.newConcurrentSet(); private final BiConsumer newCredentialsConsumer; + private final Map instanceAddresses; private final Set validCredentialsEndpoints = ConcurrentCollections.newConcurrentSet(); private final Supplier availabilityZoneSupplier; @@ -50,10 +52,12 @@ public Ec2ImdsHttpHandler( Ec2ImdsVersion ec2ImdsVersion, BiConsumer newCredentialsConsumer, Collection alternativeCredentialsEndpoints, - Supplier availabilityZoneSupplier + Supplier availabilityZoneSupplier, + Map instanceAddresses ) { this.ec2ImdsVersion = Objects.requireNonNull(ec2ImdsVersion); this.newCredentialsConsumer = Objects.requireNonNull(newCredentialsConsumer); + this.instanceAddresses = instanceAddresses; this.validCredentialsEndpoints.addAll(alternativeCredentialsEndpoints); this.availabilityZoneSupplier = availabilityZoneSupplier; } @@ -97,17 +101,11 @@ public void handle(final HttpExchange exchange) throws IOException { if (path.equals(IMDS_SECURITY_CREDENTIALS_PATH)) { final var profileName = randomIdentifier(); validCredentialsEndpoints.add(IMDS_SECURITY_CREDENTIALS_PATH + profileName); - final byte[] response = profileName.getBytes(StandardCharsets.UTF_8); - exchange.getResponseHeaders().add("Content-Type", "text/plain"); - exchange.sendResponseHeaders(RestStatus.OK.getStatus(), response.length); - exchange.getResponseBody().write(response); + sendStringResponse(exchange, profileName); return; } else if (path.equals("/latest/meta-data/placement/availability-zone")) { final var availabilityZone = availabilityZoneSupplier.get(); - final byte[] response = availabilityZone.getBytes(StandardCharsets.UTF_8); - exchange.getResponseHeaders().add("Content-Type", "text/plain"); - exchange.sendResponseHeaders(RestStatus.OK.getStatus(), response.length); - exchange.getResponseBody().write(response); + sendStringResponse(exchange, availabilityZone); return; } else if (validCredentialsEndpoints.contains(path)) { final String accessKey = 
randomIdentifier(); @@ -132,10 +130,20 @@ public void handle(final HttpExchange exchange) throws IOException { exchange.sendResponseHeaders(RestStatus.OK.getStatus(), response.length); exchange.getResponseBody().write(response); return; + } else if (instanceAddresses.get(path) instanceof String instanceAddress) { + sendStringResponse(exchange, instanceAddress); + return; + } } ExceptionsHelper.maybeDieOnAnotherThread(new AssertionError("not supported: " + requestMethod + " " + path)); } } + + private void sendStringResponse(HttpExchange exchange, String value) throws IOException { + final byte[] response = value.getBytes(StandardCharsets.UTF_8); + exchange.getResponseHeaders().add("Content-Type", "text/plain"); + exchange.sendResponseHeaders(RestStatus.OK.getStatus(), response.length); + exchange.getResponseBody().write(response); + } } diff --git a/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsServiceBuilder.java b/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsServiceBuilder.java index bca43da8683b6..505c9978bc4fb 100644 --- a/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsServiceBuilder.java +++ b/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsServiceBuilder.java @@ -12,6 +12,8 @@ import org.elasticsearch.test.ESTestCase; import java.util.Collection; +import java.util.HashMap; +import java.util.Map; import java.util.Set; import java.util.function.BiConsumer; import java.util.function.Supplier; @@ -22,6 +24,7 @@ public class Ec2ImdsServiceBuilder { private BiConsumer newCredentialsConsumer = Ec2ImdsServiceBuilder::rejectNewCredentials; private Collection alternativeCredentialsEndpoints = Set.of(); private Supplier availabilityZoneSupplier = Ec2ImdsServiceBuilder::rejectAvailabilityZone; + private final Map instanceAddresses = new HashMap<>(); public Ec2ImdsServiceBuilder(Ec2ImdsVersion ec2ImdsVersion) { this.ec2ImdsVersion = ec2ImdsVersion; @@ -50,8 +53,19 @@ public Ec2ImdsServiceBuilder availabilityZoneSupplier(Supplier availabil return this; } + public Ec2ImdsServiceBuilder addInstanceAddress(String addressType, String addressValue) { + instanceAddresses.put("/latest/meta-data/" + addressType, addressValue); + return this; + } + public Ec2ImdsHttpHandler buildHandler() { - return new Ec2ImdsHttpHandler(ec2ImdsVersion, newCredentialsConsumer, alternativeCredentialsEndpoints, availabilityZoneSupplier); + return new Ec2ImdsHttpHandler( + ec2ImdsVersion, + newCredentialsConsumer, + alternativeCredentialsEndpoints, + availabilityZoneSupplier, + Map.copyOf(instanceAddresses) + ); + } }
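Taken together, the fixture changes compose like this: Ec2ImdsServiceBuilder.addInstanceAddress registers extra metadata paths under /latest/meta-data/, runWithFixture stands the HTTP fixture up around a test body, and withEc2MetadataServiceEndpointOverride returns a Releasable that restores the SDK system property afterwards. A rough usage sketch; the getAddress() accessor and the Ec2ImdsVersion.V2 constant are assumed from the wider fixture API rather than shown in these hunks:

// Sketch only, under the assumptions noted above.
Ec2ImdsHttpFixture.runWithFixture(
    new Ec2ImdsServiceBuilder(Ec2ImdsVersion.V2).addInstanceAddress("public-ipv4", "10.0.0.1"),
    fixture -> {
        // Point the AWS SDK at the fixture; the Releasable restores the original property on close.
        try (Releasable ignored = Ec2ImdsHttpFixture.withEc2MetadataServiceEndpointOverride(fixture.getAddress())) {
            // ... exercise code that resolves instance metadata against the fixture ...
        }
    }
);

diff --git a/test/framework/src/main/java/org/elasticsearch/plugins/internal/rewriter/MockQueryRewriteInterceptor.java b/test/framework/src/main/java/org/elasticsearch/plugins/internal/rewriter/MockQueryRewriteInterceptor.java new file mode 100644 index 0000000000000..196e5bd4f4a2d --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/plugins/internal/rewriter/MockQueryRewriteInterceptor.java @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1".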
+ */ + +package org.elasticsearch.plugins.internal.rewriter; + +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryRewriteContext; + +public class MockQueryRewriteInterceptor implements QueryRewriteInterceptor { + + @Override + public QueryBuilder interceptAndRewrite(QueryRewriteContext context, QueryBuilder queryBuilder) { + return queryBuilder; + } + + @Override + public String getQueryName() { + return this.getClass().getSimpleName(); + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java index bdf323afb8d96..20cb66affddee 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java @@ -71,6 +71,8 @@ import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.plugins.ScriptPlugin; import org.elasticsearch.plugins.SearchPlugin; +import org.elasticsearch.plugins.internal.rewriter.MockQueryRewriteInterceptor; +import org.elasticsearch.plugins.internal.rewriter.QueryRewriteInterceptor; import org.elasticsearch.plugins.scanners.StablePluginsRegistry; import org.elasticsearch.script.MockScriptEngine; import org.elasticsearch.script.MockScriptService; @@ -629,7 +631,8 @@ QueryRewriteContext createQueryRewriteContext() { () -> true, scriptService, createMockResolvedIndices(), - null + null, + createMockQueryRewriteInterceptor() ); } @@ -670,5 +673,9 @@ private ResolvedIndices createMockResolvedIndices() { Map.of(index, indexMetadata) ); } + + private QueryRewriteInterceptor createMockQueryRewriteInterceptor() { + return new MockQueryRewriteInterceptor(); + } } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensedFeature.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensedFeature.java index d86c15aa14bc9..558303f7e0f0f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensedFeature.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensedFeature.java @@ -104,7 +104,7 @@ public boolean isNeedsActive() { return needsActive; } - /** Create a momentary feature for hte given license level */ + /** Create a momentary feature for the given license level */ public static Momentary momentary(String family, String name, License.OperationMode licenseLevel) { return new Momentary(family, name, licenseLevel, true); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java index 4f8a18e28aea1..3c7b089b4cd63 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java @@ -106,6 +106,7 @@ public class XPackLicenseState { messages.put(XPackField.CCR, XPackLicenseState::ccrAcknowledgementMessages); messages.put(XPackField.ENTERPRISE_SEARCH, XPackLicenseState::enterpriseSearchAcknowledgementMessages); messages.put(XPackField.REDACT_PROCESSOR, XPackLicenseState::redactProcessorAcknowledgementMessages); + messages.put(XPackField.ESQL, XPackLicenseState::esqlAcknowledgementMessages); ACKNOWLEDGMENT_MESSAGES = Collections.unmodifiableMap(messages); } @@ -243,6 +244,26 @@ private static String[] enterpriseSearchAcknowledgementMessages(OperationMode cu return Strings.EMPTY_ARRAY; } + 
private static String[] esqlAcknowledgementMessages(OperationMode currentMode, OperationMode newMode) { + /* + * Provide an acknowledgement warning to customers that downgrade from Trial or Enterprise to a lower + * license level (Basic, Standard, Gold or Platinum) that they will no longer be able to do CCS in ES|QL. + */ + switch (newMode) { + case BASIC: + case STANDARD: + case GOLD: + case PLATINUM: + switch (currentMode) { + case TRIAL: + case ENTERPRISE: + return new String[] { "ES|QL cross-cluster search will be disabled." }; + } + break; + } + return Strings.EMPTY_ARRAY; + } + private static String[] machineLearningAcknowledgementMessages(OperationMode currentMode, OperationMode newMode) { switch (newMode) { case BASIC: diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/AllocateAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/AllocateAction.java index 311f3484900f2..bc9c3474ee63a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/AllocateAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/AllocateAction.java @@ -20,8 +20,6 @@ import org.elasticsearch.xpack.core.ilm.Step.StepKey; import java.io.IOException; -import java.util.Arrays; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Objects; @@ -73,17 +71,17 @@ public AllocateAction( Map require ) { if (include == null) { - this.include = Collections.emptyMap(); + this.include = Map.of(); } else { this.include = include; } if (exclude == null) { - this.exclude = Collections.emptyMap(); + this.exclude = Map.of(); } else { this.exclude = exclude; } if (require == null) { - this.require = Collections.emptyMap(); + this.require = Map.of(); } else { this.require = require; } @@ -201,7 +199,7 @@ public List toSteps(Client client, String phase, StepKey nextStepKey) { } UpdateSettingsStep allocateStep = new UpdateSettingsStep(allocateKey, allocationRoutedKey, client, newSettings.build()); AllocationRoutedStep routedCheckStep = new AllocationRoutedStep(allocationRoutedKey, nextStepKey); - return Arrays.asList(allocateStep, routedCheckStep); + return List.of(allocateStep, routedCheckStep); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/AllocationRoutedStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/AllocationRoutedStep.java index 7cdef6207c487..bc3fc0ccae02c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/AllocationRoutedStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/AllocationRoutedStep.java @@ -22,7 +22,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; -import java.util.Collections; +import java.util.List; import static org.elasticsearch.xpack.core.ilm.step.info.AllocationInfo.allShardsActiveAllocationInfo; import static org.elasticsearch.xpack.core.ilm.step.info.AllocationInfo.waitingForActiveShardsAllocationInfo; @@ -62,7 +62,7 @@ public Result isConditionMet(Index index, ClusterState clusterState) { } AllocationDeciders allocationDeciders = new AllocationDeciders( - Collections.singletonList( + List.of( new FilterAllocationDecider( clusterState.getMetadata().settings(), new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)
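A pattern worth calling out across the ILM changes here and the test updates below: Arrays.asList, Collections.singletonList/singletonMap and Collections.emptyMap/emptyList are being replaced by the Java 9+ List.of / Map.of factories. The factories are shorter, but they are also uniformly immutable and reject null elements, which the older utilities did not always do. A small self-contained sketch of the behavioral difference:

import java.util.Arrays;
import java.util.List;

class ImmutableFactoryDemo {
    public static void main(String[] args) {
        List<String> steps = List.of("allocate", "migrate");
        // steps.add("shrink");            // List.of is immutable: UnsupportedOperationException
        // List.of("allocate", null);      // List.of rejects nulls: NullPointerException

        List<String> legacy = Arrays.asList("allocate", "migrate");
        legacy.set(0, "shrink");           // Arrays.asList is fixed-size but allows set()
        System.out.println(steps + " " + legacy);
    }
}

diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DeleteAction.java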
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DeleteAction.java index d212492f14d01..8712cefac5d31 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DeleteAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DeleteAction.java @@ -17,7 +17,6 @@ import java.io.IOException; import java.time.Instant; -import java.util.Arrays; import java.util.List; import java.util.Objects; @@ -99,7 +98,7 @@ public List toSteps(Client client, String phase, Step.StepKey nextStepKey) ); CleanupSnapshotStep cleanupSnapshotStep = new CleanupSnapshotStep(cleanSnapshotKey, deleteStepKey, client); DeleteStep deleteStep = new DeleteStep(deleteStepKey, nextStepKey, client); - return Arrays.asList(waitForNoFollowersStep, waitUntilTimeSeriesEndTimeStep, cleanupSnapshotStep, deleteStep); + return List.of(waitForNoFollowersStep, waitUntilTimeSeriesEndTimeStep, cleanupSnapshotStep, deleteStep); } else { WaitForNoFollowersStep waitForNoFollowersStep = new WaitForNoFollowersStep( waitForNoFollowerStepKey, @@ -113,7 +112,7 @@ public List toSteps(Client client, String phase, Step.StepKey nextStepKey) client ); DeleteStep deleteStep = new DeleteStep(deleteStepKey, nextStepKey, client); - return Arrays.asList(waitForNoFollowersStep, waitUntilTimeSeriesEndTimeStep, deleteStep); + return List.of(waitForNoFollowersStep, waitUntilTimeSeriesEndTimeStep, deleteStep); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ForceMergeStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ForceMergeStep.java index f3afe9e4d52cc..741fff63f61f5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ForceMergeStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ForceMergeStep.java @@ -20,7 +20,6 @@ import java.util.Arrays; import java.util.Objects; -import java.util.stream.Collectors; /** * Invokes a force merge on a single index. @@ -67,10 +66,7 @@ public void performAction( policyName, failures == null ? 
"n/a" - : Strings.collectionToDelimitedString( - Arrays.stream(failures).map(Strings::toString).collect(Collectors.toList()), - "," - ), + : Strings.collectionToDelimitedString(Arrays.stream(failures).map(Strings::toString).toList(), ","), NAME ); logger.warn(errorMessage); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/FreezeAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/FreezeAction.java index 67763e781e5a5..09e625b96135c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/FreezeAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/FreezeAction.java @@ -18,7 +18,6 @@ import org.elasticsearch.xpack.core.ilm.Step.StepKey; import java.io.IOException; -import java.util.Arrays; import java.util.List; /** @@ -98,7 +97,7 @@ public List toSteps(Client client, String phase, StepKey nextStepKey) { ); CheckNotDataStreamWriteIndexStep checkNoWriteIndexStep = new CheckNotDataStreamWriteIndexStep(checkNotWriteIndex, freezeStepKey); FreezeStep freezeStep = new FreezeStep(freezeStepKey, nextStepKey, client); - return Arrays.asList(conditionalSkipFreezeStep, checkNoWriteIndexStep, freezeStep); + return List.of(conditionalSkipFreezeStep, checkNoWriteIndexStep, freezeStep); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyUtils.java index 1a64e589d20b5..6a272b0d2271e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyUtils.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyUtils.java @@ -26,7 +26,6 @@ import java.io.IOException; import java.util.List; import java.util.Map; -import java.util.stream.Collectors; /** * A utility class used for index lifecycle policies @@ -121,7 +120,7 @@ public static ItemUsage calculateUsage( .stream() .filter(indexMetadata -> policyName.equals(indexMetadata.getLifecyclePolicyName())) .map(indexMetadata -> indexMetadata.getIndex().getName()) - .collect(Collectors.toList()); + .toList(); final List allDataStreams = indexNameExpressionResolver.dataStreamNames( state, @@ -136,12 +135,12 @@ public static ItemUsage calculateUsage( } else { return false; } - }).collect(Collectors.toList()); + }).toList(); final List composableTemplates = state.metadata().templatesV2().keySet().stream().filter(templateName -> { Settings settings = MetadataIndexTemplateService.resolveSettings(state.metadata(), templateName); return policyName.equals(LifecycleSettings.LIFECYCLE_NAME_SETTING.get(settings)); - }).collect(Collectors.toList()); + }).toList(); return new ItemUsage(indices, dataStreams, composableTemplates); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ReadOnlyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ReadOnlyAction.java index 117abecafeab3..2b03dc77eb5b6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ReadOnlyAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ReadOnlyAction.java @@ -17,7 +17,6 @@ import java.io.IOException; import java.time.Instant; -import java.util.Arrays; import java.util.List; /** @@ -72,7 +71,7 @@ public List toSteps(Client client, String phase, StepKey nextStepKey) { client ); ReadOnlyStep readOnlyStep = new ReadOnlyStep(readOnlyKey, nextStepKey, client); - return 
Arrays.asList(checkNotWriteIndexStep, waitUntilTimeSeriesEndTimeStep, readOnlyStep); + return List.of(checkNotWriteIndexStep, waitUntilTimeSeriesEndTimeStep, readOnlyStep); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/RolloverAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/RolloverAction.java index 515941bce841a..f3c72004d6cc9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/RolloverAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/RolloverAction.java @@ -22,7 +22,6 @@ import org.elasticsearch.xpack.core.ilm.Step.StepKey; import java.io.IOException; -import java.util.Arrays; import java.util.List; import java.util.Objects; @@ -172,7 +171,7 @@ public List toSteps(Client client, String phase, Step.StepKey nextStepKey) client, INDEXING_COMPLETE ); - return Arrays.asList(waitForRolloverReadyStep, rolloverStep, waitForActiveShardsStep, updateDateStep, setIndexingCompleteStep); + return List.of(waitForRolloverReadyStep, rolloverStep, waitForActiveShardsStep, updateDateStep, setIndexingCompleteStep); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SegmentCountStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SegmentCountStep.java index ad8f450fb0849..95ca049740c73 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SegmentCountStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SegmentCountStep.java @@ -67,10 +67,7 @@ public void evaluateCondition(Metadata metadata, Index index, Listener listener, response.getFailedShards(), failures == null ? "n/a" - : Strings.collectionToDelimitedString( - Arrays.stream(failures).map(Strings::toString).collect(Collectors.toList()), - "," - ) + : Strings.collectionToDelimitedString(Arrays.stream(failures).map(Strings::toString).toList(), ",") ); listener.onResponse(true, new Info(-1)); } else { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SetPriorityAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SetPriorityAction.java index 376567bc2004c..5f7c1d0c3bf3a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SetPriorityAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SetPriorityAction.java @@ -21,7 +21,6 @@ import org.elasticsearch.xpack.core.ilm.Step.StepKey; import java.io.IOException; -import java.util.Collections; import java.util.List; import java.util.Objects; @@ -101,7 +100,7 @@ public List toSteps(Client client, String phase, StepKey nextStepKey) { Settings indexPriority = recoveryPriority == null ? 
NULL_PRIORITY_SETTINGS : Settings.builder().put(IndexMetadata.INDEX_PRIORITY_SETTING.getKey(), recoveryPriority).build(); - return Collections.singletonList(new UpdateSettingsStep(key, nextStepKey, client, indexPriority)); + return List.of(new UpdateSettingsStep(key, nextStepKey, client, indexPriority)); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ShrinkAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ShrinkAction.java index 401d87f853360..70ec5da1d8a2a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ShrinkAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ShrinkAction.java @@ -31,7 +31,6 @@ import java.time.Instant; import java.util.List; import java.util.Objects; -import java.util.stream.Collectors; import java.util.stream.Stream; import static org.elasticsearch.xpack.core.ilm.ShrinkIndexNameSupplier.SHRUNKEN_INDEX_PREFIX; @@ -329,7 +328,7 @@ public List toSteps(Client client, String phase, Step.StepKey nextStepKey) allowWriteAfterShrinkStep ); - return steps.filter(Objects::nonNull).collect(Collectors.toList()); + return steps.filter(Objects::nonNull).toList(); } @Override
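The companion cleanup to the List.of change is replacing .collect(Collectors.toList()) with Java 16's Stream.toList(), as in ShrinkAction.toSteps above. The two are not perfectly interchangeable: Stream.toList() returns an unmodifiable list (and tolerates null elements), while Collectors.toList() has in practice produced a mutable ArrayList. A compact sketch:

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

class StreamToListDemo {
    public static void main(String[] args) {
        List<String> collected = Stream.of("hot", "warm").collect(Collectors.toList());
        collected.add("cold");             // fine: Collectors.toList() is mutable in practice

        List<String> unmodifiable = Stream.of("hot", "warm").toList();
        // unmodifiable.add("cold");       // would throw UnsupportedOperationException
        System.out.println(collected + " " + unmodifiable);
    }
}

diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/TimeseriesLifecycleType.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/TimeseriesLifecycleType.java index 48a0e65bddf22..0fd280f440f39 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/TimeseriesLifecycleType.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/TimeseriesLifecycleType.java @@ -14,7 +14,6 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; @@ -30,8 +29,6 @@ import java.util.stream.Collectors; import java.util.stream.Stream; -import static java.util.stream.Collectors.toList; - /** * Represents the lifecycle of an index from creation to deletion. A * {@link TimeseriesLifecycleType} is made up of a set of {@link Phase}s which it will @@ -114,7 +111,7 @@ public class TimeseriesLifecycleType implements LifecycleType { // Set of actions that cannot be defined (executed) after the managed index has been mounted as searchable snapshot. // It's ordered to produce consistent error messages which can be unit tested.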
public static final Set ACTIONS_CANNOT_FOLLOW_SEARCHABLE_SNAPSHOT = Collections.unmodifiableSet( - new LinkedHashSet<>(Arrays.asList(ForceMergeAction.NAME, FreezeAction.NAME, ShrinkAction.NAME, DownsampleAction.NAME)) + new LinkedHashSet<>(List.of(ForceMergeAction.NAME, FreezeAction.NAME, ShrinkAction.NAME, DownsampleAction.NAME)) ); private TimeseriesLifecycleType() {} @@ -180,11 +177,11 @@ public static boolean shouldInjectMigrateStepForPhase(Phase phase) { public List getOrderedActions(Phase phase) { Map actions = phase.getActions(); return switch (phase.getName()) { - case HOT_PHASE -> ORDERED_VALID_HOT_ACTIONS.stream().map(actions::get).filter(Objects::nonNull).collect(toList()); - case WARM_PHASE -> ORDERED_VALID_WARM_ACTIONS.stream().map(actions::get).filter(Objects::nonNull).collect(toList()); - case COLD_PHASE -> ORDERED_VALID_COLD_ACTIONS.stream().map(actions::get).filter(Objects::nonNull).collect(toList()); - case FROZEN_PHASE -> ORDERED_VALID_FROZEN_ACTIONS.stream().map(actions::get).filter(Objects::nonNull).collect(toList()); - case DELETE_PHASE -> ORDERED_VALID_DELETE_ACTIONS.stream().map(actions::get).filter(Objects::nonNull).collect(toList()); + case HOT_PHASE -> ORDERED_VALID_HOT_ACTIONS.stream().map(actions::get).filter(Objects::nonNull).toList(); + case WARM_PHASE -> ORDERED_VALID_WARM_ACTIONS.stream().map(actions::get).filter(Objects::nonNull).toList(); + case COLD_PHASE -> ORDERED_VALID_COLD_ACTIONS.stream().map(actions::get).filter(Objects::nonNull).toList(); + case FROZEN_PHASE -> ORDERED_VALID_FROZEN_ACTIONS.stream().map(actions::get).filter(Objects::nonNull).toList(); + case DELETE_PHASE -> ORDERED_VALID_DELETE_ACTIONS.stream().map(actions::get).filter(Objects::nonNull).toList(); default -> throw new IllegalArgumentException("lifecycle type [" + TYPE + "] does not support phase [" + phase.getName() + "]"); }; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/UnfollowAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/UnfollowAction.java index 31aaba551a3f3..6bb0178f1471e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/UnfollowAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/UnfollowAction.java @@ -17,7 +17,6 @@ import org.elasticsearch.xpack.core.ilm.Step.StepKey; import java.io.IOException; -import java.util.Arrays; import java.util.List; import java.util.Map; @@ -72,7 +71,7 @@ public List toSteps(Client client, String phase, StepKey nextStepKey) { UnfollowFollowerIndexStep step5 = new UnfollowFollowerIndexStep(unfollowFollowerIndex, openFollowerIndex, client); OpenIndexStep step6 = new OpenIndexStep(openFollowerIndex, waitForYellowStep, client); WaitForIndexColorStep step7 = new WaitForIndexColorStep(waitForYellowStep, nextStepKey, ClusterHealthStatus.YELLOW); - return Arrays.asList(conditionalSkipUnfollowStep, step1, step2, step3, step4, step5, step6, step7); + return List.of(conditionalSkipUnfollowStep, step1, step2, step3, step4, step5, step6, step7); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForFollowShardTasksStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForFollowShardTasksStep.java index 224319722297c..590890405b8d7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForFollowShardTasksStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForFollowShardTasksStep.java @@ -22,7 +22,6 @@ 
import java.io.IOException; import java.util.List; import java.util.Map; -import java.util.stream.Collectors; import static org.elasticsearch.xpack.core.ilm.UnfollowAction.CCR_METADATA_KEY; @@ -78,7 +77,7 @@ static void handleResponse(FollowStatsAction.StatsResponses responses, Listener status.followerGlobalCheckpoint() ) ) - .collect(Collectors.toList()); + .toList(); listener.onResponse(false, new Info(shardFollowTaskInfos)); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForSnapshotAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForSnapshotAction.java index 08a884f0b8f3c..2633656d7c30c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForSnapshotAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForSnapshotAction.java @@ -17,7 +17,6 @@ import org.elasticsearch.xpack.core.ilm.Step.StepKey; import java.io.IOException; -import java.util.Collections; import java.util.List; import java.util.Objects; @@ -62,7 +61,7 @@ public String getPolicy() { @Override public List toSteps(Client client, String phase, StepKey nextStepKey) { StepKey waitForSnapshotKey = new StepKey(phase, NAME, WaitForSnapshotStep.NAME); - return Collections.singletonList(new WaitForSnapshotStep(waitForSnapshotKey, nextStepKey, client, policy)); + return List.of(new WaitForSnapshotStep(waitForSnapshotKey, nextStepKey, client, policy)); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MachineLearningField.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MachineLearningField.java index 3a37f94e6b2d4..a40babb2760fb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MachineLearningField.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MachineLearningField.java @@ -64,12 +64,8 @@ public final class MachineLearningField { License.OperationMode.PLATINUM ); - // Ideally this would be 7.0.0, but it has to be 6.4.0 because due to an oversight it's impossible - // for the Java code to distinguish the model states for versions 6.4.0 to 7.9.3 inclusive. - public static final MlConfigVersion MIN_CHECKED_SUPPORTED_SNAPSHOT_VERSION = MlConfigVersion.fromString("6.4.0"); - // We tell the user we support model snapshots newer than 7.0.0 as that's the major version - // boundary, even though behind the scenes we have to support back to 6.4.0. + // This is the last version in which the ML job snapshot format was changed, so it is the minimum supported snapshot version.
+ public static final MlConfigVersion MIN_SUPPORTED_SNAPSHOT_VERSION = MlConfigVersion.V_8_3_0; private MachineLearningField() {} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLService.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLService.java index 9704335776f11..d0d5e463f9652 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLService.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLService.java @@ -596,6 +596,8 @@ static Map getSSLSettingsMap(Settings settings) { sslSettingsMap.put(WatcherField.EMAIL_NOTIFICATION_SSL_PREFIX, settings.getByPrefix(WatcherField.EMAIL_NOTIFICATION_SSL_PREFIX)); sslSettingsMap.put(XPackSettings.TRANSPORT_SSL_PREFIX, settings.getByPrefix(XPackSettings.TRANSPORT_SSL_PREFIX)); sslSettingsMap.putAll(getTransportProfileSSLSettings(settings)); + // Mount Elastic Inference Service (part of the Inference plugin) configuration + sslSettingsMap.put("xpack.inference.elastic.http.ssl", settings.getByPrefix("xpack.inference.elastic.http.ssl.")); // Only build remote cluster server SSL if the port is enabled if (REMOTE_CLUSTER_SERVER_ENABLED.get(settings)) { sslSettingsMap.put(XPackSettings.REMOTE_CLUSTER_SERVER_SSL_PREFIX, getRemoteClusterServerSslSettings(settings)); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java index e889d25cd7a96..d788a0b5abd37 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.xpack.core.XPackField; import java.util.Arrays; +import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; @@ -59,6 +60,12 @@ void assertAckMessages(String feature, OperationMode from, OperationMode to, int assertEquals(expectedMessages, gotMessages.length); } + void assertAckMessages(String feature, OperationMode from, OperationMode to, Set expectedMessages) { + String[] gotMessages = XPackLicenseState.ACKNOWLEDGMENT_MESSAGES.get(feature).apply(from, to); + Set actualMessages = Arrays.stream(gotMessages).collect(Collectors.toSet()); + assertThat(actualMessages, equalTo(expectedMessages)); + } + static T randomFrom(T[] values, Predicate filter) { return randomFrom(Arrays.stream(values).filter(filter).collect(Collectors.toList())); } @@ -143,6 +150,16 @@ public void testCcrAckTrialOrPlatinumToNotTrialOrPlatinum() { assertAckMessages(XPackField.CCR, randomTrialOrPlatinumMode(), randomBasicStandardOrGold(), 1); } + public void testEsqlAckToTrialOrEnterprise() { + assertAckMessages(XPackField.ESQL, randomMode(), randomFrom(TRIAL, ENTERPRISE), 0); + } + + public void testEsqlAckTrialOrEnterpriseToNotTrialOrEnterprise() { + for (OperationMode to : List.of(BASIC, STANDARD, GOLD, PLATINUM)) { + assertAckMessages(XPackField.ESQL, randomFrom(TRIAL, ENTERPRISE), to, Set.of("ES|QL cross-cluster search will be disabled.")); + } + } + public void testExpiredLicense() { // use standard feature which would normally be allowed at all license levels LicensedFeature feature = LicensedFeature.momentary("family", "enterpriseFeature", STANDARD);
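The new tests call the per-feature acknowledgement functions directly through ACKNOWLEDGMENT_MESSAGES. Conceptually, a license change collects the non-empty message arrays across all registered features; a condensed, illustrative sketch of that lookup (the helper below is hypothetical, only the map and its apply(from, to) shape come from the code above):

// Hypothetical helper: gather acknowledgement messages for a proposed license change.
static Map<String, String[]> collectAckMessages(OperationMode current, OperationMode next) {
    Map<String, String[]> acks = new HashMap<>();
    XPackLicenseState.ACKNOWLEDGMENT_MESSAGES.forEach((feature, messagesFor) -> {
        String[] messages = messagesFor.apply(current, next);
        if (messages.length > 0) {
            acks.put(feature, messages); // e.g. ESQL -> ["ES|QL cross-cluster search will be disabled."]
        }
    });
    return acks;
}

diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java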
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java index 1f2c89c473a62..d50f7bb27a5df 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java @@ -623,7 +623,7 @@ public Map getSnapshotCommitSup } @SuppressWarnings("unchecked") - private List filterPlugins(Class type) { + protected List filterPlugins(Class type) { return plugins.stream().filter(x -> type.isAssignableFrom(x.getClass())).map(p -> ((T) p)).collect(Collectors.toList()); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/AllocateActionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/AllocateActionTests.java index 1fc0afafde353..c5a8185f8511b 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/AllocateActionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/AllocateActionTests.java @@ -14,7 +14,6 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ilm.Step.StepKey; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -44,20 +43,20 @@ static AllocateAction randomInstance() { includes = randomAllocationRoutingMap(1, 100); hasAtLeastOneMap = true; } else { - includes = randomBoolean() ? null : Collections.emptyMap(); + includes = randomBoolean() ? null : Map.of(); } Map excludes; if (randomBoolean()) { hasAtLeastOneMap = true; excludes = randomAllocationRoutingMap(1, 100); } else { - excludes = randomBoolean() ? null : Collections.emptyMap(); + excludes = randomBoolean() ? null : Map.of(); } Map requires; if (hasAtLeastOneMap == false || randomBoolean()) { requires = randomAllocationRoutingMap(1, 100); } else { - requires = randomBoolean() ? null : Collections.emptyMap(); + requires = randomBoolean() ? null : Map.of(); } Integer numberOfReplicas = randomBoolean() ? null : randomIntBetween(0, 10); Integer totalShardsPerNode = randomBoolean() ? null : randomIntBetween(-1, 10); @@ -97,9 +96,9 @@ protected AllocateAction mutateInstance(AllocateAction instance) { } public void testAllMapsNullOrEmpty() { - Map include = randomBoolean() ? null : Collections.emptyMap(); - Map exclude = randomBoolean() ? null : Collections.emptyMap(); - Map require = randomBoolean() ? null : Collections.emptyMap(); + Map include = randomBoolean() ? null : Map.of(); + Map exclude = randomBoolean() ? null : Map.of(); + Map require = randomBoolean() ? null : Map.of(); IllegalArgumentException exception = expectThrows( IllegalArgumentException.class, () -> new AllocateAction(null, null, include, exclude, require) @@ -124,8 +123,8 @@ public void testAllMapsNullOrEmpty() { public void testInvalidNumberOfReplicas() { Map include = randomAllocationRoutingMap(1, 5); - Map exclude = randomBoolean() ? null : Collections.emptyMap(); - Map require = randomBoolean() ? null : Collections.emptyMap(); + Map exclude = randomBoolean() ? null : Map.of(); + Map require = randomBoolean() ? 
null : Map.of(); IllegalArgumentException exception = expectThrows( IllegalArgumentException.class, () -> new AllocateAction(randomIntBetween(-1000, -1), randomIntBetween(0, 300), include, exclude, require) @@ -135,8 +134,8 @@ public void testInvalidNumberOfReplicas() { public void testInvalidTotalShardsPerNode() { Map include = randomAllocationRoutingMap(1, 5); - Map exclude = randomBoolean() ? null : Collections.emptyMap(); - Map require = randomBoolean() ? null : Collections.emptyMap(); + Map exclude = randomBoolean() ? null : Map.of(); + Map require = randomBoolean() ? null : Map.of(); IllegalArgumentException exception = expectThrows( IllegalArgumentException.class, () -> new AllocateAction(randomIntBetween(0, 300), randomIntBetween(-1000, -2), include, exclude, require) diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/AllocationRoutedStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/AllocationRoutedStepTests.java index afad708ddbe2c..708c3630b8b8a 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/AllocationRoutedStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/AllocationRoutedStepTests.java @@ -27,7 +27,6 @@ import org.elasticsearch.xpack.core.ilm.ClusterStateWaitStep.Result; import org.elasticsearch.xpack.core.ilm.Step.StepKey; -import java.util.Collections; import java.util.Map; import static org.elasticsearch.cluster.routing.TestShardRouting.buildUnassignedInfo; @@ -109,7 +108,7 @@ public void testConditionMet() { public void testRequireConditionMetOnlyOneCopyAllocated() { Index index = new Index(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20)); - Map requires = Collections.singletonMap(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_SETTING.getKey() + "foo", "bar"); + Map requires = Map.of(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_SETTING.getKey() + "foo", "bar"); Settings.Builder existingSettings = Settings.builder() .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) .put(IndexMetadata.SETTING_INDEX_UUID, index.getUUID()); @@ -187,7 +186,7 @@ public void testClusterExcludeFiltersConditionMetOnlyOneCopyAllocated() { public void testExcludeConditionMetOnlyOneCopyAllocated() { Index index = new Index(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20)); - Map excludes = Collections.singletonMap(IndexMetadata.INDEX_ROUTING_EXCLUDE_GROUP_SETTING.getKey() + "foo", "bar"); + Map excludes = Map.of(IndexMetadata.INDEX_ROUTING_EXCLUDE_GROUP_SETTING.getKey() + "foo", "bar"); Settings.Builder existingSettings = Settings.builder() .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) .put(IndexMetadata.SETTING_INDEX_UUID, index.getUUID()); @@ -218,7 +217,7 @@ public void testExcludeConditionMetOnlyOneCopyAllocated() { public void testIncludeConditionMetOnlyOneCopyAllocated() { Index index = new Index(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20)); - Map includes = Collections.singletonMap(IndexMetadata.INDEX_ROUTING_INCLUDE_GROUP_SETTING.getKey() + "foo", "bar"); + Map includes = Map.of(IndexMetadata.INDEX_ROUTING_INCLUDE_GROUP_SETTING.getKey() + "foo", "bar"); Settings.Builder existingSettings = Settings.builder() .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) .put(IndexMetadata.SETTING_INDEX_UUID, index.getUUID()); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CheckShrinkReadyStepTests.java 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CheckShrinkReadyStepTests.java index 8dcd8fc7ddd55..72bf7cedb2fb9 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CheckShrinkReadyStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CheckShrinkReadyStepTests.java @@ -29,9 +29,9 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.node.Node; -import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.Set; import static org.elasticsearch.cluster.metadata.SingleNodeShutdownMetadata.Type.SIGTERM; import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder; @@ -340,7 +340,7 @@ public void testExecuteAllocateReplicaUnassigned() { */ public void testExecuteReplicasNotAllocatedOnSingleNode() { Index index = new Index(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20)); - Map requires = Collections.singletonMap("_id", "node1"); + Map requires = Map.of("_id", "node1"); Settings.Builder existingSettings = Settings.builder() .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) .put(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_PREFIX + "._id", "node1") @@ -376,7 +376,7 @@ public void testExecuteReplicasNotAllocatedOnSingleNode() { public void testExecuteReplicasButCopiesNotPresent() { Index index = new Index(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20)); - Map requires = Collections.singletonMap("_id", "node1"); + Map requires = Map.of("_id", "node1"); Settings.Builder existingSettings = Settings.builder() .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) .put(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_PREFIX + "._id", "node1") @@ -458,7 +458,7 @@ public void testStepCompletableIfAllShardsActive() { .putCustom( NodesShutdownMetadata.TYPE, new NodesShutdownMetadata( - Collections.singletonMap( + Map.of( "node1", SingleNodeShutdownMetadata.builder() .setType(type) @@ -537,7 +537,7 @@ public void testStepBecomesUncompletable() { .putCustom( NodesShutdownMetadata.TYPE, new NodesShutdownMetadata( - Collections.singletonMap( + Map.of( "node1", SingleNodeShutdownMetadata.builder() .setType(type) @@ -649,7 +649,7 @@ public static UnassignedInfo randomUnassignedInfo(String message) { System.currentTimeMillis(), delayed, UnassignedInfo.AllocationStatus.NO_ATTEMPT, - Collections.emptySet(), + Set.of(), lastAllocatedNodeId ); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CloseFollowerIndexStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CloseFollowerIndexStepTests.java index 7ce078826b49a..ef7325be0a496 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CloseFollowerIndexStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CloseFollowerIndexStepTests.java @@ -13,7 +13,8 @@ import org.elasticsearch.index.IndexVersion; import org.mockito.Mockito; -import java.util.Collections; +import java.util.List; +import java.util.Map; import static org.elasticsearch.xpack.core.ilm.UnfollowAction.CCR_METADATA_KEY; import static org.hamcrest.Matchers.equalTo; @@ -24,7 +25,7 @@ public class CloseFollowerIndexStepTests extends AbstractStepTestCase listener = (ActionListener) invocation.getArguments()[1]; - listener.onResponse(new CloseIndexResponse(true, true, Collections.emptyList())); + listener.onResponse(new CloseIndexResponse(true, true, List.of())); return null; 
}).when(indicesClient).close(Mockito.any(), Mockito.any()); @@ -54,7 +55,7 @@ public void testRequestNotAcknowledged() { assertThat(closeIndexRequest.indices()[0], equalTo("follower-index")); @SuppressWarnings("unchecked") ActionListener listener = (ActionListener) invocation.getArguments()[1]; - listener.onResponse(new CloseIndexResponse(false, false, Collections.emptyList())); + listener.onResponse(new CloseIndexResponse(false, false, List.of())); return null; }).when(indicesClient).close(Mockito.any(), Mockito.any()); @@ -85,7 +86,7 @@ public void testCloseFollowingIndexFailed() { public void testCloseFollowerIndexIsNoopForAlreadyClosedIndex() throws Exception { IndexMetadata indexMetadata = IndexMetadata.builder("follower-index") .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE, "true")) - .putCustom(CCR_METADATA_KEY, Collections.emptyMap()) + .putCustom(CCR_METADATA_KEY, Map.of()) .state(IndexMetadata.State.CLOSE) .numberOfShards(1) .numberOfReplicas(0) diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CloseIndexStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CloseIndexStepTests.java index 02fb49ac71adf..b546aeaa20687 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CloseIndexStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CloseIndexStepTests.java @@ -20,7 +20,7 @@ import org.mockito.Mockito; import org.mockito.stubbing.Answer; -import java.util.Collections; +import java.util.List; import static org.hamcrest.Matchers.equalTo; @@ -77,9 +77,7 @@ public void testPerformAction() { @SuppressWarnings("unchecked") ActionListener listener = (ActionListener) invocation.getArguments()[1]; assertThat(request.indices(), equalTo(new String[] { indexMetadata.getIndex().getName() })); - listener.onResponse( - new CloseIndexResponse(true, true, Collections.singletonList(new CloseIndexResponse.IndexResult(indexMetadata.getIndex()))) - ); + listener.onResponse(new CloseIndexResponse(true, true, List.of(new CloseIndexResponse.IndexResult(indexMetadata.getIndex())))); return null; }).when(indicesClient).close(Mockito.any(), Mockito.any()); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ClusterStateWaitUntilThresholdStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ClusterStateWaitUntilThresholdStepTests.java index f24f0f86de7db..eeddda4199665 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ClusterStateWaitUntilThresholdStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ClusterStateWaitUntilThresholdStepTests.java @@ -20,7 +20,6 @@ import java.time.Clock; import java.time.Instant; import java.time.ZoneId; -import java.util.Collections; import java.util.Map; import java.util.UUID; @@ -83,7 +82,7 @@ public void testIsConditionMetForUnderlyingStep() { .put(LifecycleSettings.LIFECYCLE_STEP_WAIT_TIME_THRESHOLD, "480h") ) .putCustom(ILM_CUSTOM_METADATA_KEY, Map.of("step_time", String.valueOf(System.currentTimeMillis()))) - .putCustom(CCR_METADATA_KEY, Collections.emptyMap()) + .putCustom(CCR_METADATA_KEY, Map.of()) .numberOfShards(1) .numberOfReplicas(0) .build(); @@ -107,7 +106,7 @@ public void testIsConditionMetForUnderlyingStep() { .put(LifecycleSettings.LIFECYCLE_STEP_WAIT_TIME_THRESHOLD, "48h") ) .putCustom(ILM_CUSTOM_METADATA_KEY, Map.of("step_time", String.valueOf(System.currentTimeMillis()))) - 
.putCustom(CCR_METADATA_KEY, Collections.emptyMap()) + .putCustom(CCR_METADATA_KEY, Map.of()) .numberOfShards(1) .numberOfReplicas(0) .build(); @@ -140,7 +139,7 @@ public void testIsConditionMetForUnderlyingStep() { settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE, "true") .put(LifecycleSettings.LIFECYCLE_STEP_WAIT_TIME_THRESHOLD, "1s") ) - .putCustom(CCR_METADATA_KEY, Collections.emptyMap()) + .putCustom(CCR_METADATA_KEY, Map.of()) .putCustom(ILM_CUSTOM_METADATA_KEY, Map.of("step_time", String.valueOf(1234L))) .numberOfShards(1) .numberOfReplicas(0) @@ -168,7 +167,7 @@ public void testIsConditionMetForUnderlyingStep() { settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE, "false") .put(LifecycleSettings.LIFECYCLE_STEP_WAIT_TIME_THRESHOLD, "1h") ) - .putCustom(CCR_METADATA_KEY, Collections.emptyMap()) + .putCustom(CCR_METADATA_KEY, Map.of()) .putCustom(ILM_CUSTOM_METADATA_KEY, Map.of("step_time", String.valueOf(1234L))) .numberOfShards(1) .numberOfReplicas(0) diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleResponseTests.java index 937502281b64d..c4138d228719e 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleResponseTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleResponseTests.java @@ -18,7 +18,6 @@ import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; -import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -81,7 +80,7 @@ protected boolean assertToXContentEquivalence() { protected NamedWriteableRegistry getNamedWriteableRegistry() { return new NamedWriteableRegistry( - Arrays.asList(new NamedWriteableRegistry.Entry(LifecycleAction.class, MockAction.NAME, MockAction::new)) + List.of(new NamedWriteableRegistry.Entry(LifecycleAction.class, MockAction.NAME, MockAction::new)) ); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ForceMergeActionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ForceMergeActionTests.java index aecf029a1357a..b8d480200fb5d 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ForceMergeActionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ForceMergeActionTests.java @@ -20,7 +20,6 @@ import java.io.IOException; import java.util.List; -import java.util.stream.Collectors; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.equalTo; @@ -109,7 +108,7 @@ private void assertBestCompression(ForceMergeAction instance) { // available .skip(1) .map(s -> new Tuple<>(s.getKey(), s.getNextStepKey())) - .collect(Collectors.toList()); + .toList(); StepKey checkNotWriteIndex = new StepKey(phase, ForceMergeAction.NAME, CheckNotDataStreamWriteIndexStep.NAME); StepKey waitTimeSeriesEndTimePassesKey = new StepKey(phase, ForceMergeAction.NAME, WaitUntilTimeSeriesEndTimePassesStep.NAME); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStepTests.java index bee6351582bc9..908e7b764f136 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStepTests.java +++ 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStepTests.java @@ -17,7 +17,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexVersion; -import java.util.Collections; +import java.util.List; import java.util.Locale; import static org.elasticsearch.cluster.metadata.LifecycleExecutionState.ILM_CUSTOM_METADATA_KEY; @@ -82,7 +82,7 @@ private void testPerformAction(String policyName, String expectedPolicyName) { .metadata( Metadata.builder() .put(indexMetadata, false) - .putCustom(RepositoriesMetadata.TYPE, new RepositoriesMetadata(Collections.singletonList(repo))) + .putCustom(RepositoriesMetadata.TYPE, new RepositoriesMetadata(List.of(repo))) .build() ) .build(); @@ -167,7 +167,7 @@ public void testPerformActionWillOverwriteCachedRepository() { .metadata( Metadata.builder() .put(indexMetadata, false) - .putCustom(RepositoriesMetadata.TYPE, new RepositoriesMetadata(Collections.singletonList(repo))) + .putCustom(RepositoriesMetadata.TYPE, new RepositoriesMetadata(List.of(repo))) .build() ) .build(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleExplainResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleExplainResponseTests.java index ea3c9cc5926ab..6fc98d4c2c728 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleExplainResponseTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleExplainResponseTests.java @@ -23,7 +23,7 @@ import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; -import java.util.Arrays; +import java.util.List; import java.util.Objects; import java.util.function.Supplier; @@ -292,7 +292,7 @@ protected IndexLifecycleExplainResponse mutateInstance(IndexLifecycleExplainResp protected NamedWriteableRegistry getNamedWriteableRegistry() { return new NamedWriteableRegistry( - Arrays.asList(new NamedWriteableRegistry.Entry(LifecycleAction.class, MockAction.NAME, MockAction::new)) + List.of(new NamedWriteableRegistry.Entry(LifecycleAction.class, MockAction.NAME, MockAction::new)) ); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyClientTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyClientTests.java index 753edfbe334b9..7dd6bfd209660 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyClientTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyClientTests.java @@ -21,7 +21,6 @@ import org.elasticsearch.xpack.core.ClientHelper; import org.mockito.Mockito; -import java.util.Collections; import java.util.Map; import java.util.concurrent.CountDownLatch; @@ -56,7 +55,7 @@ public void testExecuteWithHeadersAsyncNoHeaders() throws InterruptedException { SearchRequest request = new SearchRequest("foo"); - final var policyClient = new LifecyclePolicySecurityClient(client, ClientHelper.INDEX_LIFECYCLE_ORIGIN, Collections.emptyMap()); + final var policyClient = new LifecyclePolicySecurityClient(client, ClientHelper.INDEX_LIFECYCLE_ORIGIN, Map.of()); policyClient.execute(TransportSearchAction.TYPE, request, listener); latch.await(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyMetadataTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyMetadataTests.java 
index 3e9fd0105feae..b58d7184f741c 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyMetadataTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyMetadataTests.java @@ -18,7 +18,6 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -37,7 +36,7 @@ public void setup() { @Override protected NamedWriteableRegistry getNamedWriteableRegistry() { return new NamedWriteableRegistry( - Arrays.asList( + List.of( new NamedWriteableRegistry.Entry(LifecycleAction.class, MockAction.NAME, MockAction::new), new NamedWriteableRegistry.Entry( LifecycleType.class, @@ -65,7 +64,7 @@ protected NamedWriteableRegistry getNamedWriteableRegistry() { protected NamedXContentRegistry xContentRegistry() { List entries = new ArrayList<>(ClusterModule.getNamedXWriteables()); entries.addAll( - Arrays.asList( + List.of( new NamedXContentRegistry.Entry( LifecycleType.class, new ParseField(TimeseriesLifecycleType.TYPE), diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyTests.java index 70f75f1cfcdfa..1bea0ac6d192c 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyTests.java @@ -20,8 +20,6 @@ import org.elasticsearch.xpack.core.ilm.Step.StepKey; import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; @@ -30,7 +28,6 @@ import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.function.Function; -import java.util.stream.Collectors; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -48,7 +45,7 @@ protected LifecyclePolicy doParseInstance(XContentParser parser) { @Override protected NamedWriteableRegistry getNamedWriteableRegistry() { return new NamedWriteableRegistry( - Arrays.asList( + List.of( new NamedWriteableRegistry.Entry( LifecycleType.class, TimeseriesLifecycleType.TYPE, @@ -75,7 +72,7 @@ protected NamedWriteableRegistry getNamedWriteableRegistry() { protected NamedXContentRegistry xContentRegistry() { List entries = new ArrayList<>(ClusterModule.getNamedXWriteables()); entries.addAll( - Arrays.asList( + List.of( new NamedXContentRegistry.Entry( LifecycleType.class, new ParseField(TimeseriesLifecycleType.TYPE), @@ -150,7 +147,7 @@ public static LifecyclePolicy randomTimeseriesLifecyclePolicy(@Nullable String l ).stream() // Remove the frozen phase, we'll randomly re-add it later .filter(pn -> TimeseriesLifecycleType.FROZEN_PHASE.equals(pn) == false) - .collect(Collectors.toList()); + .toList(); // let's order the phases so we can reason about actions in a previous phase in order to generate a random *valid* policy List orderedPhases = new ArrayList<>(phaseNames.size()); @@ -218,7 +215,7 @@ public static LifecyclePolicy randomTimeseriesLifecyclePolicy(@Nullable String l new Phase( TimeseriesLifecycleType.FROZEN_PHASE, frozenTime, - Collections.singletonMap( + Map.of( SearchableSnapshotAction.NAME, new SearchableSnapshotAction( randomAlphaOfLength(10), @@ -300,11 +297,11 @@ protected LifecyclePolicy mutateInstance(LifecyclePolicy instance) { () -> randomFrom( 
TimeseriesLifecycleType.ORDERED_VALID_PHASES.stream() .filter(pn -> TimeseriesLifecycleType.FROZEN_PHASE.equals(pn) == false) - .collect(Collectors.toList()) + .toList() ) ); phases = new LinkedHashMap<>(phases); - phases.put(phaseName, new Phase(phaseName, null, Collections.emptyMap())); + phases.put(phaseName, new Phase(phaseName, null, Map.of())); } case 2 -> metadata = randomValueOtherThan(metadata, LifecyclePolicyTests::randomMeta); case 3 -> deprecated = instance.isDeprecated() ? randomFrom(false, null) : true; @@ -337,8 +334,8 @@ public void testToStepsWithOneStep() { lifecycleName = randomAlphaOfLengthBetween(1, 20); Map phases = new LinkedHashMap<>(); - LifecycleAction firstAction = new MockAction(Arrays.asList(mockStep)); - Map actions = Collections.singletonMap(MockAction.NAME, firstAction); + LifecycleAction firstAction = new MockAction(List.of(mockStep)); + Map actions = Map.of(MockAction.NAME, firstAction); Phase firstPhase = new Phase("test", TimeValue.ZERO, actions); phases.put(firstPhase.getName(), firstPhase); LifecyclePolicy policy = new LifecyclePolicy(TestLifecycleType.INSTANCE, lifecycleName, phases, randomMeta()); @@ -372,10 +369,10 @@ public void testToStepsWithTwoPhases() { lifecycleName = randomAlphaOfLengthBetween(1, 20); Map phases = new LinkedHashMap<>(); - LifecycleAction firstAction = new MockAction(Arrays.asList(firstActionStep, firstActionAnotherStep)); - LifecycleAction secondAction = new MockAction(Arrays.asList(secondActionStep)); - Map firstActions = Collections.singletonMap(MockAction.NAME, firstAction); - Map secondActions = Collections.singletonMap(MockAction.NAME, secondAction); + LifecycleAction firstAction = new MockAction(List.of(firstActionStep, firstActionAnotherStep)); + LifecycleAction secondAction = new MockAction(List.of(secondActionStep)); + Map firstActions = Map.of(MockAction.NAME, firstAction); + Map secondActions = Map.of(MockAction.NAME, secondAction); Phase firstPhase = new Phase("first_phase", TimeValue.ZERO, firstActions); Phase secondPhase = new Phase("second_phase", TimeValue.ZERO, secondActions); phases.put(firstPhase.getName(), firstPhase); @@ -401,10 +398,10 @@ public void testToStepsWithTwoPhases() { public void testIsActionSafe() { Map phases = new LinkedHashMap<>(); - LifecycleAction firstAction = new MockAction(Collections.emptyList(), true); - LifecycleAction secondAction = new MockAction(Collections.emptyList(), false); - Map firstActions = Collections.singletonMap(MockAction.NAME, firstAction); - Map secondActions = Collections.singletonMap(MockAction.NAME, secondAction); + LifecycleAction firstAction = new MockAction(List.of(), true); + LifecycleAction secondAction = new MockAction(List.of(), false); + Map firstActions = Map.of(MockAction.NAME, firstAction); + Map secondActions = Map.of(MockAction.NAME, secondAction); Phase firstPhase = new Phase("first_phase", TimeValue.ZERO, firstActions); Phase secondPhase = new Phase("second_phase", TimeValue.ZERO, secondActions); phases.put(firstPhase.getName(), firstPhase); @@ -458,12 +455,9 @@ public void testValidatePolicyName() { public static Map randomMeta() { if (randomBoolean()) { if (randomBoolean()) { - return Collections.singletonMap(randomAlphaOfLength(4), randomAlphaOfLength(4)); + return Map.of(randomAlphaOfLength(4), randomAlphaOfLength(4)); } else { - return Collections.singletonMap( - randomAlphaOfLength(5), - Collections.singletonMap(randomAlphaOfLength(4), randomAlphaOfLength(4)) - ); + return Map.of(randomAlphaOfLength(5), Map.of(randomAlphaOfLength(4), 
randomAlphaOfLength(4))); } } else { return null; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyUtilsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyUtilsTests.java index 3efe2dc04ea19..978486c6c0d39 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyUtilsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyUtilsTests.java @@ -23,8 +23,8 @@ import org.elasticsearch.indices.EmptySystemIndices; import org.elasticsearch.test.ESTestCase; -import java.util.Arrays; -import java.util.Collections; +import java.util.List; +import java.util.Map; import static org.hamcrest.Matchers.equalTo; @@ -40,7 +40,7 @@ public void testCalculateUsage() { ClusterState state = ClusterState.builder(new ClusterName("mycluster")).build(); assertThat( LifecyclePolicyUtils.calculateUsage(iner, state, "mypolicy"), - equalTo(new ItemUsage(Collections.emptyList(), Collections.emptyList(), Collections.emptyList())) + equalTo(new ItemUsage(List.of(), List.of(), List.of())) ); } @@ -52,7 +52,7 @@ public void testCalculateUsage() { .putCustom( IndexLifecycleMetadata.TYPE, new IndexLifecycleMetadata( - Collections.singletonMap("mypolicy", LifecyclePolicyMetadataTests.createRandomPolicyMetadata("mypolicy")), + Map.of("mypolicy", LifecyclePolicyMetadataTests.createRandomPolicyMetadata("mypolicy")), OperationMode.RUNNING ) ) @@ -61,7 +61,7 @@ public void testCalculateUsage() { .build(); assertThat( LifecyclePolicyUtils.calculateUsage(iner, state, "mypolicy"), - equalTo(new ItemUsage(Collections.emptyList(), Collections.emptyList(), Collections.emptyList())) + equalTo(new ItemUsage(List.of(), List.of(), List.of())) ); } @@ -73,7 +73,7 @@ public void testCalculateUsage() { .putCustom( IndexLifecycleMetadata.TYPE, new IndexLifecycleMetadata( - Collections.singletonMap("mypolicy", LifecyclePolicyMetadataTests.createRandomPolicyMetadata("mypolicy")), + Map.of("mypolicy", LifecyclePolicyMetadataTests.createRandomPolicyMetadata("mypolicy")), OperationMode.RUNNING ) ) @@ -86,7 +86,7 @@ public void testCalculateUsage() { .build(); assertThat( LifecyclePolicyUtils.calculateUsage(iner, state, "mypolicy"), - equalTo(new ItemUsage(Collections.singleton("myindex"), Collections.emptyList(), Collections.emptyList())) + equalTo(new ItemUsage(List.of("myindex"), List.of(), List.of())) ); } @@ -98,7 +98,7 @@ public void testCalculateUsage() { .putCustom( IndexLifecycleMetadata.TYPE, new IndexLifecycleMetadata( - Collections.singletonMap("mypolicy", LifecyclePolicyMetadataTests.createRandomPolicyMetadata("mypolicy")), + Map.of("mypolicy", LifecyclePolicyMetadataTests.createRandomPolicyMetadata("mypolicy")), OperationMode.RUNNING ) ) @@ -109,10 +109,10 @@ public void testCalculateUsage() { .putCustom( ComposableIndexTemplateMetadata.TYPE, new ComposableIndexTemplateMetadata( - Collections.singletonMap( + Map.of( "mytemplate", ComposableIndexTemplate.builder() - .indexPatterns(Collections.singletonList("myds")) + .indexPatterns(List.of("myds")) .template( new Template( Settings.builder().put(LifecycleSettings.LIFECYCLE_NAME, "mypolicy").build(), @@ -130,7 +130,7 @@ public void testCalculateUsage() { .build(); assertThat( LifecyclePolicyUtils.calculateUsage(iner, state, "mypolicy"), - equalTo(new ItemUsage(Collections.singleton("myindex"), Collections.emptyList(), Collections.singleton("mytemplate"))) + equalTo(new ItemUsage(List.of("myindex"), List.of(), 
List.of("mytemplate"))) ); } @@ -139,7 +139,7 @@ public void testCalculateUsage() { .putCustom( IndexLifecycleMetadata.TYPE, new IndexLifecycleMetadata( - Collections.singletonMap("mypolicy", LifecyclePolicyMetadataTests.createRandomPolicyMetadata("mypolicy")), + Map.of("mypolicy", LifecyclePolicyMetadataTests.createRandomPolicyMetadata("mypolicy")), OperationMode.RUNNING ) ) @@ -159,10 +159,10 @@ public void testCalculateUsage() { .putCustom( ComposableIndexTemplateMetadata.TYPE, new ComposableIndexTemplateMetadata( - Collections.singletonMap( + Map.of( "mytemplate", ComposableIndexTemplate.builder() - .indexPatterns(Collections.singletonList("myds")) + .indexPatterns(List.of("myds")) .template( new Template(Settings.builder().put(LifecycleSettings.LIFECYCLE_NAME, "mypolicy").build(), null, null) ) @@ -172,15 +172,13 @@ public void testCalculateUsage() { ) ); // Need to get the real Index instance of myindex: - mBuilder.put(DataStreamTestHelper.newInstance("myds", Collections.singletonList(mBuilder.get("myindex").getIndex()))); + mBuilder.put(DataStreamTestHelper.newInstance("myds", List.of(mBuilder.get("myindex").getIndex()))); // Test where policy exists and is used by an index, datastream, and template ClusterState state = ClusterState.builder(new ClusterName("mycluster")).metadata(mBuilder.build()).build(); assertThat( LifecyclePolicyUtils.calculateUsage(iner, state, "mypolicy"), - equalTo( - new ItemUsage(Arrays.asList("myindex", "another"), Collections.singleton("myds"), Collections.singleton("mytemplate")) - ) + equalTo(new ItemUsage(List.of("myindex", "another"), List.of("myds"), List.of("mytemplate"))) ); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/MockAction.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/MockAction.java index 0de234615f4c7..79f8a051abe25 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/MockAction.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/MockAction.java @@ -15,10 +15,8 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.Collections; import java.util.List; import java.util.Objects; -import java.util.stream.Collectors; public class MockAction implements LifecycleAction { public static final String NAME = "TEST_ACTION"; @@ -32,7 +30,7 @@ public static MockAction parse(XContentParser parser) { } public MockAction() { - this(Collections.emptyList()); + this(List.of()); } public MockAction(List steps) { @@ -77,7 +75,7 @@ public List toSteps(Client client, String phase, Step.StepKey nextStepKey) @Override public void writeTo(StreamOutput out) throws IOException { - out.writeCollection(steps.stream().map(MockStep::new).collect(Collectors.toList())); + out.writeCollection(steps.stream().map(MockStep::new).toList()); out.writeBoolean(safe); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/OperationModeUpdateTaskTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/OperationModeUpdateTaskTests.java index 9871cb79b595b..475161676f2e8 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/OperationModeUpdateTaskTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/OperationModeUpdateTaskTests.java @@ -14,7 +14,6 @@ import org.elasticsearch.xpack.core.slm.SnapshotLifecycleMetadata; import org.elasticsearch.xpack.core.slm.SnapshotLifecycleStats; -import java.util.Collections; import java.util.Map; import 
java.util.Optional; @@ -97,9 +96,9 @@ private OperationMode executeILMUpdate( OperationMode requestMode, boolean assertSameClusterState ) { - IndexLifecycleMetadata indexLifecycleMetadata = new IndexLifecycleMetadata(Collections.emptyMap(), currentMode); + IndexLifecycleMetadata indexLifecycleMetadata = new IndexLifecycleMetadata(Map.of(), currentMode); SnapshotLifecycleMetadata snapshotLifecycleMetadata = new SnapshotLifecycleMetadata( - Collections.emptyMap(), + Map.of(), currentMode, new SnapshotLifecycleStats() ); @@ -131,9 +130,9 @@ private OperationMode executeSLMUpdate( OperationMode requestMode, boolean assertSameClusterState ) { - IndexLifecycleMetadata indexLifecycleMetadata = new IndexLifecycleMetadata(Collections.emptyMap(), currentMode); + IndexLifecycleMetadata indexLifecycleMetadata = new IndexLifecycleMetadata(Map.of(), currentMode); SnapshotLifecycleMetadata snapshotLifecycleMetadata = new SnapshotLifecycleMetadata( - Collections.emptyMap(), + Map.of(), currentMode, new SnapshotLifecycleStats() ); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/PauseFollowerIndexStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/PauseFollowerIndexStepTests.java index 51ebc98176955..da5d6eddfc72d 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/PauseFollowerIndexStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/PauseFollowerIndexStepTests.java @@ -22,7 +22,7 @@ import org.elasticsearch.xpack.core.ccr.action.ShardFollowTask; import org.mockito.Mockito; -import java.util.Collections; +import java.util.Map; import static org.elasticsearch.xpack.core.ilm.UnfollowAction.CCR_METADATA_KEY; import static org.hamcrest.Matchers.equalTo; @@ -38,7 +38,7 @@ protected PauseFollowerIndexStep newInstance(Step.StepKey key, Step.StepKey next public void testPauseFollowingIndex() throws Exception { IndexMetadata indexMetadata = IndexMetadata.builder("follower-index") .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE, "true")) - .putCustom(CCR_METADATA_KEY, Collections.emptyMap()) + .putCustom(CCR_METADATA_KEY, Map.of()) .numberOfShards(1) .numberOfReplicas(0) .build(); @@ -60,7 +60,7 @@ public void testPauseFollowingIndex() throws Exception { public void testRequestNotAcknowledged() { IndexMetadata indexMetadata = IndexMetadata.builder("follower-index") .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE, "true")) - .putCustom(CCR_METADATA_KEY, Collections.emptyMap()) + .putCustom(CCR_METADATA_KEY, Map.of()) .numberOfShards(1) .numberOfReplicas(0) .build(); @@ -81,7 +81,7 @@ public void testRequestNotAcknowledged() { public void testPauseFollowingIndexFailed() { IndexMetadata indexMetadata = IndexMetadata.builder("follower-index") .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE, "true")) - .putCustom(CCR_METADATA_KEY, Collections.emptyMap()) + .putCustom(CCR_METADATA_KEY, Map.of()) .numberOfShards(1) .numberOfReplicas(0) .build(); @@ -107,7 +107,7 @@ public void testPauseFollowingIndexFailed() { public final void testNoShardFollowPersistentTasks() throws Exception { IndexMetadata indexMetadata = IndexMetadata.builder("managed-index") .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE, "true")) - .putCustom(CCR_METADATA_KEY, Collections.emptyMap()) + .putCustom(CCR_METADATA_KEY, Map.of()) .numberOfShards(1) 
.numberOfReplicas(0) .build(); @@ -138,7 +138,7 @@ public final void testNoShardFollowTasksForManagedIndex() throws Exception { IndexMetadata followerIndex = IndexMetadata.builder("follower-index") .settings(settings(IndexVersion.current())) - .putCustom(CCR_METADATA_KEY, Collections.emptyMap()) + .putCustom(CCR_METADATA_KEY, Map.of()) .numberOfShards(1) .numberOfReplicas(0) .build(); @@ -171,7 +171,7 @@ private static ClusterState setupClusterStateWithFollowingIndex(IndexMetadata fo new ByteSizeValue(512, ByteSizeUnit.MB), TimeValue.timeValueMillis(10), TimeValue.timeValueMillis(10), - Collections.emptyMap() + Map.of() ), null ); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/PhaseCacheManagementTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/PhaseCacheManagementTests.java index 952741032fc90..7e78a81776a7a 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/PhaseCacheManagementTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/PhaseCacheManagementTests.java @@ -21,7 +21,6 @@ import org.elasticsearch.xcontent.ParseField; import java.io.IOException; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -84,9 +83,9 @@ public void testRefreshPhaseJson() throws IOException { actions.put("rollover", new RolloverAction(null, null, null, 1L, null, null, null, null, null, null)); actions.put("set_priority", new SetPriorityAction(100)); Phase hotPhase = new Phase("hot", TimeValue.ZERO, actions); - Map phases = Collections.singletonMap("hot", hotPhase); + Map phases = Map.of("hot", hotPhase); LifecyclePolicy newPolicy = new LifecyclePolicy("my-policy", phases); - LifecyclePolicyMetadata policyMetadata = new LifecyclePolicyMetadata(newPolicy, Collections.emptyMap(), 2L, 2L); + LifecyclePolicyMetadata policyMetadata = new LifecyclePolicyMetadata(newPolicy, Map.of(), 2L, 2L); ClusterState existingState = ClusterState.builder(ClusterState.EMPTY_STATE) .metadata(Metadata.builder(Metadata.EMPTY_METADATA).put(meta, false).build()) @@ -315,7 +314,7 @@ public void testIndexCanBeSafelyUpdated() { actions.put("rollover", new RolloverAction(null, null, null, 1L, null, null, null, null, null, null)); actions.put("set_priority", new SetPriorityAction(100)); Phase hotPhase = new Phase("hot", TimeValue.ZERO, actions); - Map phases = Collections.singletonMap("hot", hotPhase); + Map phases = Map.of("hot", hotPhase); LifecyclePolicy newPolicy = new LifecyclePolicy("my-policy", phases); assertTrue(isIndexPhaseDefinitionUpdatable(REGISTRY, client, meta, newPolicy, null)); @@ -351,7 +350,7 @@ public void testIndexCanBeSafelyUpdated() { Map actions = new HashMap<>(); actions.put("set_priority", new SetPriorityAction(150)); Phase hotPhase = new Phase("hot", TimeValue.ZERO, actions); - Map phases = Collections.singletonMap("hot", hotPhase); + Map phases = Map.of("hot", hotPhase); LifecyclePolicy newPolicy = new LifecyclePolicy("my-policy", phases); assertFalse(isIndexPhaseDefinitionUpdatable(REGISTRY, client, meta, newPolicy, null)); @@ -390,7 +389,7 @@ public void testIndexCanBeSafelyUpdated() { new RolloverAction(null, null, TimeValue.timeValueSeconds(5), null, null, null, null, null, null, null) ); Phase hotPhase = new Phase("hot", TimeValue.ZERO, actions); - Map phases = Collections.singletonMap("hot", hotPhase); + Map phases = Map.of("hot", hotPhase); LifecyclePolicy newPolicy = new LifecyclePolicy("my-policy", phases); 
assertFalse(isIndexPhaseDefinitionUpdatable(REGISTRY, client, meta, newPolicy, null)); @@ -422,7 +421,7 @@ public void testIndexCanBeSafelyUpdated() { actions.put("rollover", new RolloverAction(null, null, null, 1L, null, null, null, null, null, null)); actions.put("set_priority", new SetPriorityAction(100)); Phase hotPhase = new Phase("hot", TimeValue.ZERO, actions); - Map phases = Collections.singletonMap("hot", hotPhase); + Map phases = Map.of("hot", hotPhase); LifecyclePolicy newPolicy = new LifecyclePolicy("my-policy", phases); assertFalse(isIndexPhaseDefinitionUpdatable(REGISTRY, client, meta, newPolicy, null)); @@ -443,7 +442,7 @@ public void testIndexCanBeSafelyUpdated() { actions.put("rollover", new RolloverAction(null, null, null, 1L, null, null, null, null, null, null)); actions.put("set_priority", new SetPriorityAction(100)); Phase hotPhase = new Phase("hot", TimeValue.ZERO, actions); - Map phases = Collections.singletonMap("hot", hotPhase); + Map phases = Map.of("hot", hotPhase); LifecyclePolicy newPolicy = new LifecyclePolicy("my-policy", phases); assertFalse(isIndexPhaseDefinitionUpdatable(REGISTRY, client, meta, newPolicy, null)); @@ -482,16 +481,16 @@ public void testUpdateIndicesForPolicy() throws IOException { oldActions.put("rollover", new RolloverAction(null, null, null, 1L, null, null, null, null, null, null)); oldActions.put("set_priority", new SetPriorityAction(100)); Phase oldHotPhase = new Phase("hot", TimeValue.ZERO, oldActions); - Map oldPhases = Collections.singletonMap("hot", oldHotPhase); + Map oldPhases = Map.of("hot", oldHotPhase); LifecyclePolicy oldPolicy = new LifecyclePolicy("my-policy", oldPhases); Map actions = new HashMap<>(); actions.put("rollover", new RolloverAction(null, null, null, 1L, null, null, null, null, null, null)); actions.put("set_priority", new SetPriorityAction(100)); Phase hotPhase = new Phase("hot", TimeValue.ZERO, actions); - Map phases = Collections.singletonMap("hot", hotPhase); + Map phases = Map.of("hot", hotPhase); LifecyclePolicy newPolicy = new LifecyclePolicy("my-policy", phases); - LifecyclePolicyMetadata policyMetadata = new LifecyclePolicyMetadata(newPolicy, Collections.emptyMap(), 2L, 2L); + LifecyclePolicyMetadata policyMetadata = new LifecyclePolicyMetadata(newPolicy, Map.of(), 2L, 2L); assertTrue(isIndexPhaseDefinitionUpdatable(REGISTRY, client, meta, newPolicy, null)); @@ -509,9 +508,9 @@ public void testUpdateIndicesForPolicy() throws IOException { actions.put("rollover", new RolloverAction(null, null, null, 2L, null, null, null, null, null, null)); actions.put("set_priority", new SetPriorityAction(150)); hotPhase = new Phase("hot", TimeValue.ZERO, actions); - phases = Collections.singletonMap("hot", hotPhase); + phases = Map.of("hot", hotPhase); newPolicy = new LifecyclePolicy("my-policy", phases); - policyMetadata = new LifecyclePolicyMetadata(newPolicy, Collections.emptyMap(), 2L, 2L); + policyMetadata = new LifecyclePolicyMetadata(newPolicy, Map.of(), 2L, 2L); logger.info("--> update with changed policy, but not configured in settings"); updatedState = updateIndicesForPolicy(existingState, REGISTRY, client, oldPolicy, policyMetadata, null); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/PhaseExecutionInfoTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/PhaseExecutionInfoTests.java index 7622118d2b99f..ce477a07c2f0b 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/PhaseExecutionInfoTests.java +++ 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/PhaseExecutionInfoTests.java @@ -18,7 +18,7 @@ import org.junit.Before; import java.io.IOException; -import java.util.Arrays; +import java.util.List; public class PhaseExecutionInfoTests extends AbstractXContentSerializingTestCase { @@ -71,7 +71,7 @@ protected PhaseExecutionInfo mutateInstance(PhaseExecutionInfo instance) { protected NamedWriteableRegistry getNamedWriteableRegistry() { return new NamedWriteableRegistry( - Arrays.asList(new NamedWriteableRegistry.Entry(LifecycleAction.class, MockAction.NAME, MockAction::new)) + List.of(new NamedWriteableRegistry.Entry(LifecycleAction.class, MockAction.NAME, MockAction::new)) ); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/PhaseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/PhaseTests.java index bf925c4282fc1..5a194b48f7701 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/PhaseTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/PhaseTests.java @@ -18,9 +18,8 @@ import org.junit.Before; import java.io.IOException; -import java.util.Arrays; -import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; @@ -42,9 +41,9 @@ static Phase randomTestPhase(String phaseName) { if (randomBoolean()) { after = randomTimeValue(0, 1_000_000_000, TimeUnit.SECONDS, TimeUnit.MINUTES, TimeUnit.HOURS, TimeUnit.DAYS); } - Map actions = Collections.emptyMap(); + Map actions = Map.of(); if (randomBoolean()) { - actions = Collections.singletonMap(MockAction.NAME, new MockAction()); + actions = Map.of(MockAction.NAME, new MockAction()); } return new Phase(phaseName, after, actions); } @@ -61,7 +60,7 @@ protected Reader instanceReader() { protected NamedWriteableRegistry getNamedWriteableRegistry() { return new NamedWriteableRegistry( - Arrays.asList(new NamedWriteableRegistry.Entry(LifecycleAction.class, MockAction.NAME, MockAction::new)) + List.of(new NamedWriteableRegistry.Entry(LifecycleAction.class, MockAction.NAME, MockAction::new)) ); } @@ -85,7 +84,7 @@ protected Phase mutateInstance(Phase instance) { case 1 -> after = TimeValue.timeValueSeconds(after.getSeconds() + randomIntBetween(1, 1000)); case 2 -> { actions = new HashMap<>(actions); - actions.put(MockAction.NAME + "another", new MockAction(Collections.emptyList())); + actions.put(MockAction.NAME + "another", new MockAction(List.of())); } default -> throw new AssertionError("Illegal randomisation branch"); } @@ -93,7 +92,7 @@ protected Phase mutateInstance(Phase instance) { } public void testDefaultAfter() { - Phase phase = new Phase(randomAlphaOfLength(20), null, Collections.emptyMap()); + Phase phase = new Phase(randomAlphaOfLength(20), null, Map.of()); assertEquals(TimeValue.ZERO, phase.getMinimumAge()); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/RolloverStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/RolloverStepTests.java index 4af25d094f5fe..3683690763d93 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/RolloverStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/RolloverStepTests.java @@ -23,9 +23,9 @@ import org.hamcrest.Matchers; import org.mockito.Mockito; -import java.util.Collections; import java.util.List; import java.util.Locale; +import java.util.Map; import static 
org.elasticsearch.cluster.metadata.DataStreamTestHelper.newInstance; import static org.mockito.Mockito.verifyNoMoreInteractions; @@ -185,7 +185,7 @@ private void mockClientRolloverCall(String rolloverTarget) { @SuppressWarnings("unchecked") ActionListener listener = (ActionListener) invocation.getArguments()[1]; assertRolloverIndexRequest(request, rolloverTarget); - listener.onResponse(new RolloverResponse(null, null, Collections.emptyMap(), request.isDryRun(), true, true, true, false)); + listener.onResponse(new RolloverResponse(null, null, Map.of(), request.isDryRun(), true, true, true, false)); return null; }).when(indicesClient).rolloverIndex(Mockito.any(), Mockito.any()); } @@ -214,11 +214,7 @@ public void testPerformActionSkipsRolloverForAlreadyRolledIndex() throws Excepti .putAlias(AliasMetadata.builder(rolloverAlias)) .settings(settings(IndexVersion.current()).put(RolloverAction.LIFECYCLE_ROLLOVER_ALIAS, rolloverAlias)) .putRolloverInfo( - new RolloverInfo( - rolloverAlias, - Collections.singletonList(new MaxSizeCondition(ByteSizeValue.ofBytes(2L))), - System.currentTimeMillis() - ) + new RolloverInfo(rolloverAlias, List.of(new MaxSizeCondition(ByteSizeValue.ofBytes(2L))), System.currentTimeMillis()) ) .numberOfShards(randomIntBetween(1, 5)) .numberOfReplicas(randomIntBetween(0, 5)) diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/SegmentCountStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/SegmentCountStepTests.java index 1d14bfb261fc2..9f04e202022c9 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/SegmentCountStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/SegmentCountStepTests.java @@ -77,14 +77,14 @@ public void testIsConditionMet() { ShardSegments shardSegmentsOne = Mockito.mock(ShardSegments.class); ShardSegments[] shardSegmentsArray = new ShardSegments[] { shardSegmentsOne }; IndexShardSegments indexShardSegments = new IndexShardSegments(ShardId.fromString("[idx][123]"), shardSegmentsArray); - Map indexShards = Collections.singletonMap(0, indexShardSegments); + Map indexShards = Map.of(0, indexShardSegments); Spliterator iss = indexShards.values().spliterator(); List segments = new ArrayList<>(); for (int i = 0; i < maxNumSegments - randomIntBetween(0, 3); i++) { segments.add(null); } Mockito.when(indicesSegmentResponse.getStatus()).thenReturn(RestStatus.OK); - Mockito.when(indicesSegmentResponse.getIndices()).thenReturn(Collections.singletonMap(index.getName(), indexSegments)); + Mockito.when(indicesSegmentResponse.getIndices()).thenReturn(Map.of(index.getName(), indexSegments)); Mockito.when(indexSegments.spliterator()).thenReturn(iss); Mockito.when(shardSegmentsOne.getSegments()).thenReturn(segments); @@ -129,14 +129,14 @@ public void testIsConditionIsTrueEvenWhenMoreSegments() { ShardSegments shardSegmentsOne = Mockito.mock(ShardSegments.class); ShardSegments[] shardSegmentsArray = new ShardSegments[] { shardSegmentsOne }; IndexShardSegments indexShardSegments = new IndexShardSegments(ShardId.fromString("[idx][123]"), shardSegmentsArray); - Map indexShards = Collections.singletonMap(0, indexShardSegments); + Map indexShards = Map.of(0, indexShardSegments); Spliterator iss = indexShards.values().spliterator(); List segments = new ArrayList<>(); for (int i = 0; i < maxNumSegments + randomIntBetween(1, 3); i++) { segments.add(null); } Mockito.when(indicesSegmentResponse.getStatus()).thenReturn(RestStatus.OK); - 
Mockito.when(indicesSegmentResponse.getIndices()).thenReturn(Collections.singletonMap(index.getName(), indexSegments)); + Mockito.when(indicesSegmentResponse.getIndices()).thenReturn(Map.of(index.getName(), indexSegments)); Mockito.when(indexSegments.spliterator()).thenReturn(iss); Mockito.when(shardSegmentsOne.getSegments()).thenReturn(segments); @@ -181,7 +181,7 @@ public void testFailedToRetrieveSomeSegments() { ShardSegments shardSegmentsOne = Mockito.mock(ShardSegments.class); ShardSegments[] shardSegmentsArray = new ShardSegments[] { shardSegmentsOne }; IndexShardSegments indexShardSegments = new IndexShardSegments(ShardId.fromString("[idx][123]"), shardSegmentsArray); - Map indexShards = Collections.singletonMap(0, indexShardSegments); + Map indexShards = Map.of(0, indexShardSegments); Spliterator iss = indexShards.values().spliterator(); List segments = new ArrayList<>(); for (int i = 0; i < maxNumSegments + randomIntBetween(1, 3); i++) { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkActionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkActionTests.java index a33d6e3332a40..60fa69708e111 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkActionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkActionTests.java @@ -28,8 +28,8 @@ import org.mockito.Mockito; import java.io.IOException; -import java.util.Collections; import java.util.List; +import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; @@ -203,11 +203,11 @@ public void assertPerformAction( LifecyclePolicy policy = new LifecyclePolicy( lifecycleName, - Collections.singletonMap("warm", new Phase("warm", TimeValue.ZERO, Collections.singletonMap(action.getWriteableName(), action))) + Map.of("warm", new Phase("warm", TimeValue.ZERO, Map.of(action.getWriteableName(), action))) ); LifecyclePolicyMetadata policyMetadata = new LifecyclePolicyMetadata( policy, - Collections.emptyMap(), + Map.of(), randomNonNegativeLong(), randomNonNegativeLong() ); @@ -216,10 +216,7 @@ public void assertPerformAction( Metadata.builder() .putCustom( IndexLifecycleMetadata.TYPE, - new IndexLifecycleMetadata( - Collections.singletonMap(policyMetadata.getName(), policyMetadata), - OperationMode.RUNNING - ) + new IndexLifecycleMetadata(Map.of(policyMetadata.getName(), policyMetadata), OperationMode.RUNNING) ) .put( indexMetadataBuilder.putCustom( diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkSetAliasStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkSetAliasStepTests.java index 7a03343b461de..c8efce288260f 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkSetAliasStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkSetAliasStepTests.java @@ -17,7 +17,6 @@ import org.mockito.Mockito; import org.mockito.stubbing.Answer; -import java.util.Arrays; import java.util.List; import static org.elasticsearch.xpack.core.ilm.ShrinkIndexNameSupplier.SHRUNKEN_INDEX_PREFIX; @@ -73,7 +72,7 @@ public void testPerformAction() throws Exception { String sourceIndex = indexMetadata.getIndex().getName(); String shrunkenIndex = SHRUNKEN_INDEX_PREFIX + sourceIndex; - List expectedAliasActions = Arrays.asList( + List expectedAliasActions = List.of( 
IndicesAliasesRequest.AliasActions.removeIndex().index(sourceIndex), IndicesAliasesRequest.AliasActions.add().index(shrunkenIndex).alias(sourceIndex), IndicesAliasesRequest.AliasActions.add() diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkStepTests.java index 257df32b4d950..b138339c25197 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkStepTests.java @@ -21,8 +21,8 @@ import org.elasticsearch.xpack.core.ilm.Step.StepKey; import org.mockito.Mockito; -import java.util.Collections; import java.util.Map; +import java.util.Set; import static org.elasticsearch.cluster.metadata.LifecycleExecutionState.ILM_CUSTOM_METADATA_KEY; import static org.elasticsearch.common.IndexNameGenerator.generateValidIndexName; @@ -101,7 +101,7 @@ public void testPerformAction() throws Exception { @SuppressWarnings("unchecked") ActionListener listener = (ActionListener) invocation.getArguments()[1]; assertThat(request.getSourceIndex(), equalTo(sourceIndexMetadata.getIndex().getName())); - assertThat(request.getTargetIndexRequest().aliases(), equalTo(Collections.emptySet())); + assertThat(request.getTargetIndexRequest().aliases(), equalTo(Set.of())); Settings.Builder builder = Settings.builder(); builder.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, sourceIndexMetadata.getNumberOfReplicas()) diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/SwapAliasesAndDeleteSourceIndexStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/SwapAliasesAndDeleteSourceIndexStepTests.java index f9f06b10ad2f9..1a99043b86ad7 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/SwapAliasesAndDeleteSourceIndexStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/SwapAliasesAndDeleteSourceIndexStepTests.java @@ -23,7 +23,6 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ilm.Step.StepKey; -import java.util.Arrays; import java.util.List; import java.util.function.BiFunction; @@ -92,7 +91,7 @@ public void testPerformAction() { String targetIndexPrefix = "index_prefix"; String targetIndexName = targetIndexPrefix + sourceIndexName; - List expectedAliasActions = Arrays.asList( + List expectedAliasActions = List.of( AliasActions.removeIndex().index(sourceIndexName), AliasActions.add().index(targetIndexName).alias(sourceIndexName), AliasActions.add() diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/TimeseriesLifecycleTypeTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/TimeseriesLifecycleTypeTests.java index 55fa3792fa6c7..f7d1ff5294f58 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/TimeseriesLifecycleTypeTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/TimeseriesLifecycleTypeTests.java @@ -13,9 +13,7 @@ import org.elasticsearch.test.ESTestCase; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -51,13 +49,7 @@ public class TimeseriesLifecycleTypeTests extends ESTestCase { - private static final AllocateAction TEST_ALLOCATE_ACTION = new AllocateAction( - 2, - 20, - 
Collections.singletonMap("node", "node1"), - null, - null - ); + private static final AllocateAction TEST_ALLOCATE_ACTION = new AllocateAction(2, 20, Map.of("node", "node1"), null, null); private static final DeleteAction TEST_DELETE_ACTION = DeleteAction.WITH_SNAPSHOT_DELETE; private static final WaitForSnapshotAction TEST_WAIT_FOR_SNAPSHOT_ACTION = new WaitForSnapshotAction("policy"); @@ -91,7 +83,7 @@ public void testValidatePhases() { if (invalid) { phaseName += randomAlphaOfLength(5); } - Map phases = Collections.singletonMap(phaseName, new Phase(phaseName, TimeValue.ZERO, Collections.emptyMap())); + Map phases = Map.of(phaseName, new Phase(phaseName, TimeValue.ZERO, Map.of())); if (invalid) { Exception e = expectThrows(IllegalArgumentException.class, () -> TimeseriesLifecycleType.INSTANCE.validate(phases.values())); assertThat(e.getMessage(), equalTo("Timeseries lifecycle does not support phase [" + phaseName + "]")); @@ -109,7 +101,7 @@ public void testValidateHotPhase() { invalidAction = getTestAction(randomFrom("allocate", "delete", "freeze")); actions.put(invalidAction.getWriteableName(), invalidAction); } - Map hotPhase = Collections.singletonMap("hot", new Phase("hot", TimeValue.ZERO, actions)); + Map hotPhase = Map.of("hot", new Phase("hot", TimeValue.ZERO, actions)); if (invalidAction != null) { Exception e = expectThrows(IllegalArgumentException.class, () -> TimeseriesLifecycleType.INSTANCE.validate(hotPhase.values())); @@ -123,14 +115,14 @@ public void testValidateHotPhase() { final Map hotActionMap = hotActions.stream() .map(this::getTestAction) .collect(Collectors.toMap(LifecycleAction::getWriteableName, Function.identity())); - TimeseriesLifecycleType.INSTANCE.validate(Collections.singleton(new Phase("hot", TimeValue.ZERO, hotActionMap))); + TimeseriesLifecycleType.INSTANCE.validate(List.of(new Phase("hot", TimeValue.ZERO, hotActionMap))); }; - validateHotActions.accept(Arrays.asList(RolloverAction.NAME)); - validateHotActions.accept(Arrays.asList(RolloverAction.NAME, ForceMergeAction.NAME)); + validateHotActions.accept(List.of(RolloverAction.NAME)); + validateHotActions.accept(List.of(RolloverAction.NAME, ForceMergeAction.NAME)); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> validateHotActions.accept(Arrays.asList(ForceMergeAction.NAME)) + () -> validateHotActions.accept(List.of(ForceMergeAction.NAME)) ); assertThat( e.getMessage(), @@ -148,7 +140,7 @@ public void testValidateWarmPhase() { invalidAction = getTestAction(randomFrom("rollover", "delete", "freeze")); actions.put(invalidAction.getWriteableName(), invalidAction); } - Map warmPhase = Collections.singletonMap("warm", new Phase("warm", TimeValue.ZERO, actions)); + Map warmPhase = Map.of("warm", new Phase("warm", TimeValue.ZERO, actions)); if (invalidAction != null) { Exception e = expectThrows(IllegalArgumentException.class, () -> TimeseriesLifecycleType.INSTANCE.validate(warmPhase.values())); @@ -167,7 +159,7 @@ public void testValidateColdPhase() { invalidAction = getTestAction(randomFrom("rollover", "delete", "forcemerge", "shrink")); actions.put(invalidAction.getWriteableName(), invalidAction); } - Map coldPhase = Collections.singletonMap("cold", new Phase("cold", TimeValue.ZERO, actions)); + Map coldPhase = Map.of("cold", new Phase("cold", TimeValue.ZERO, actions)); if (invalidAction != null) { Exception e = expectThrows(IllegalArgumentException.class, () -> TimeseriesLifecycleType.INSTANCE.validate(coldPhase.values())); @@ -188,7 +180,7 @@ public void 
testValidateFrozenPhase() { invalidAction = getTestAction(randomFrom("rollover", "delete", "forcemerge", "shrink")); actions.put(invalidAction.getWriteableName(), invalidAction); } - Map frozenPhase = Collections.singletonMap("frozen", new Phase("frozen", TimeValue.ZERO, actions)); + Map frozenPhase = Map.of("frozen", new Phase("frozen", TimeValue.ZERO, actions)); if (invalidAction != null) { Exception e = expectThrows( @@ -210,7 +202,7 @@ public void testValidateDeletePhase() { invalidAction = getTestAction(randomFrom("allocate", "rollover", "forcemerge", "shrink", "freeze", "set_priority")); actions.put(invalidAction.getWriteableName(), invalidAction); } - Map deletePhase = Collections.singletonMap("delete", new Phase("delete", TimeValue.ZERO, actions)); + Map deletePhase = Map.of("delete", new Phase("delete", TimeValue.ZERO, actions)); if (invalidAction != null) { Exception e = expectThrows( @@ -459,7 +451,7 @@ public void testValidateDownsamplingAction() { public void testGetOrderedPhases() { Map phaseMap = new HashMap<>(); for (String phaseName : randomSubsetOf(randomIntBetween(0, ORDERED_VALID_PHASES.size()), ORDERED_VALID_PHASES)) { - phaseMap.put(phaseName, new Phase(phaseName, TimeValue.ZERO, Collections.emptyMap())); + phaseMap.put(phaseName, new Phase(phaseName, TimeValue.ZERO, Map.of())); } assertTrue(isSorted(TimeseriesLifecycleType.INSTANCE.getOrderedPhases(phaseMap), Phase::getName, ORDERED_VALID_PHASES)); @@ -509,7 +501,7 @@ private boolean isUnfollowInjected(String phaseName, String actionName) { public void testGetOrderedActionsInvalidPhase() { IllegalArgumentException exception = expectThrows( IllegalArgumentException.class, - () -> TimeseriesLifecycleType.INSTANCE.getOrderedActions(new Phase("invalid", TimeValue.ZERO, Collections.emptyMap())) + () -> TimeseriesLifecycleType.INSTANCE.getOrderedActions(new Phase("invalid", TimeValue.ZERO, Map.of())) ); assertThat(exception.getMessage(), equalTo("lifecycle type [timeseries] does not support phase [invalid]")); } @@ -583,25 +575,25 @@ public void testShouldMigrateDataToTiers() { { // not inject in hot phase - Phase phase = new Phase(HOT_PHASE, TimeValue.ZERO, Collections.emptyMap()); + Phase phase = new Phase(HOT_PHASE, TimeValue.ZERO, Map.of()); assertThat(TimeseriesLifecycleType.shouldInjectMigrateStepForPhase(phase), is(false)); } { // not inject in frozen phase - Phase phase = new Phase(FROZEN_PHASE, TimeValue.ZERO, Collections.emptyMap()); + Phase phase = new Phase(FROZEN_PHASE, TimeValue.ZERO, Map.of()); assertThat(TimeseriesLifecycleType.shouldInjectMigrateStepForPhase(phase), is(false)); } { // not inject in delete phase - Phase phase = new Phase(DELETE_PHASE, TimeValue.ZERO, Collections.emptyMap()); + Phase phase = new Phase(DELETE_PHASE, TimeValue.ZERO, Map.of()); assertThat(TimeseriesLifecycleType.shouldInjectMigrateStepForPhase(phase), is(false)); } { // return false for invalid phase - Phase phase = new Phase(HOT_PHASE + randomAlphaOfLength(5), TimeValue.ZERO, Collections.emptyMap()); + Phase phase = new Phase(HOT_PHASE + randomAlphaOfLength(5), TimeValue.ZERO, Map.of()); assertThat(TimeseriesLifecycleType.shouldInjectMigrateStepForPhase(phase), is(false)); } } @@ -620,7 +612,7 @@ public void testValidatingSearchableSnapshotRepos() { Phase coldPhase = new Phase(HOT_PHASE, TimeValue.ZERO, coldActions); Phase frozenPhase = new Phase(HOT_PHASE, TimeValue.ZERO, frozenActions); - validateAllSearchableSnapshotActionsUseSameRepository(Arrays.asList(hotPhase, coldPhase, frozenPhase)); + 
validateAllSearchableSnapshotActionsUseSameRepository(List.of(hotPhase, coldPhase, frozenPhase)); } { @@ -634,7 +626,7 @@ public void testValidatingSearchableSnapshotRepos() { IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> validateAllSearchableSnapshotActionsUseSameRepository(Arrays.asList(hotPhase, coldPhase, frozenPhase)) + () -> validateAllSearchableSnapshotActionsUseSameRepository(List.of(hotPhase, coldPhase, frozenPhase)) ); assertThat( e.getMessage(), @@ -649,25 +641,25 @@ public void testValidatingSearchableSnapshotRepos() { public void testValidatingIncreasingAges() { { - Phase hotPhase = new Phase(HOT_PHASE, TimeValue.timeValueDays(1), Collections.emptyMap()); - Phase warmPhase = new Phase(WARM_PHASE, TimeValue.ZERO, Collections.emptyMap()); - Phase coldPhase = new Phase(COLD_PHASE, TimeValue.ZERO, Collections.emptyMap()); - Phase frozenPhase = new Phase(FROZEN_PHASE, TimeValue.ZERO, Collections.emptyMap()); - Phase deletePhase = new Phase(DELETE_PHASE, TimeValue.ZERO, Collections.emptyMap()); + Phase hotPhase = new Phase(HOT_PHASE, TimeValue.timeValueDays(1), Map.of()); + Phase warmPhase = new Phase(WARM_PHASE, TimeValue.ZERO, Map.of()); + Phase coldPhase = new Phase(COLD_PHASE, TimeValue.ZERO, Map.of()); + Phase frozenPhase = new Phase(FROZEN_PHASE, TimeValue.ZERO, Map.of()); + Phase deletePhase = new Phase(DELETE_PHASE, TimeValue.ZERO, Map.of()); assertFalse( Strings.hasText( - validateMonotonicallyIncreasingPhaseTimings(Arrays.asList(hotPhase, warmPhase, coldPhase, frozenPhase, deletePhase)) + validateMonotonicallyIncreasingPhaseTimings(List.of(hotPhase, warmPhase, coldPhase, frozenPhase, deletePhase)) ) ); } { - Phase hotPhase = new Phase(HOT_PHASE, TimeValue.timeValueDays(1), Collections.emptyMap()); - Phase warmPhase = new Phase(WARM_PHASE, TimeValue.timeValueDays(1), Collections.emptyMap()); - Phase coldPhase = new Phase(COLD_PHASE, TimeValue.timeValueDays(1), Collections.emptyMap()); - Phase frozenPhase = new Phase(FROZEN_PHASE, TimeValue.timeValueDays(1), Collections.emptyMap()); - Phase deletePhase = new Phase(DELETE_PHASE, TimeValue.timeValueDays(1), Collections.emptyMap()); + Phase hotPhase = new Phase(HOT_PHASE, TimeValue.timeValueDays(1), Map.of()); + Phase warmPhase = new Phase(WARM_PHASE, TimeValue.timeValueDays(1), Map.of()); + Phase coldPhase = new Phase(COLD_PHASE, TimeValue.timeValueDays(1), Map.of()); + Phase frozenPhase = new Phase(FROZEN_PHASE, TimeValue.timeValueDays(1), Map.of()); + Phase deletePhase = new Phase(DELETE_PHASE, TimeValue.timeValueDays(1), Map.of()); List phases = new ArrayList<>(); phases.add(hotPhase); @@ -687,15 +679,13 @@ public void testValidatingIncreasingAges() { } { - Phase hotPhase = new Phase(HOT_PHASE, TimeValue.timeValueDays(1), Collections.emptyMap()); - Phase warmPhase = new Phase(WARM_PHASE, TimeValue.ZERO, Collections.emptyMap()); - Phase coldPhase = new Phase(COLD_PHASE, TimeValue.timeValueHours(12), Collections.emptyMap()); - Phase frozenPhase = new Phase(FROZEN_PHASE, TimeValue.ZERO, Collections.emptyMap()); - Phase deletePhase = new Phase(DELETE_PHASE, TimeValue.ZERO, Collections.emptyMap()); + Phase hotPhase = new Phase(HOT_PHASE, TimeValue.timeValueDays(1), Map.of()); + Phase warmPhase = new Phase(WARM_PHASE, TimeValue.ZERO, Map.of()); + Phase coldPhase = new Phase(COLD_PHASE, TimeValue.timeValueHours(12), Map.of()); + Phase frozenPhase = new Phase(FROZEN_PHASE, TimeValue.ZERO, Map.of()); + Phase deletePhase = new Phase(DELETE_PHASE, TimeValue.ZERO, Map.of()); - String err = 
validateMonotonicallyIncreasingPhaseTimings( - Arrays.asList(hotPhase, warmPhase, coldPhase, frozenPhase, deletePhase) - ); + String err = validateMonotonicallyIncreasingPhaseTimings(List.of(hotPhase, warmPhase, coldPhase, frozenPhase, deletePhase)); assertThat( err, @@ -708,15 +698,13 @@ public void testValidatingIncreasingAges() { } { - Phase hotPhase = new Phase(HOT_PHASE, TimeValue.timeValueDays(1), Collections.emptyMap()); - Phase warmPhase = new Phase(WARM_PHASE, TimeValue.timeValueDays(3), Collections.emptyMap()); - Phase coldPhase = new Phase(COLD_PHASE, null, Collections.emptyMap()); - Phase frozenPhase = new Phase(FROZEN_PHASE, TimeValue.timeValueDays(1), Collections.emptyMap()); - Phase deletePhase = new Phase(DELETE_PHASE, TimeValue.timeValueDays(2), Collections.emptyMap()); + Phase hotPhase = new Phase(HOT_PHASE, TimeValue.timeValueDays(1), Map.of()); + Phase warmPhase = new Phase(WARM_PHASE, TimeValue.timeValueDays(3), Map.of()); + Phase coldPhase = new Phase(COLD_PHASE, null, Map.of()); + Phase frozenPhase = new Phase(FROZEN_PHASE, TimeValue.timeValueDays(1), Map.of()); + Phase deletePhase = new Phase(DELETE_PHASE, TimeValue.timeValueDays(2), Map.of()); - String err = validateMonotonicallyIncreasingPhaseTimings( - Arrays.asList(hotPhase, warmPhase, coldPhase, frozenPhase, deletePhase) - ); + String err = validateMonotonicallyIncreasingPhaseTimings(List.of(hotPhase, warmPhase, coldPhase, frozenPhase, deletePhase)); assertThat( err, @@ -729,15 +717,13 @@ public void testValidatingIncreasingAges() { } { - Phase hotPhase = new Phase(HOT_PHASE, TimeValue.timeValueDays(1), Collections.emptyMap()); - Phase warmPhase = new Phase(WARM_PHASE, TimeValue.timeValueDays(3), Collections.emptyMap()); - Phase coldPhase = new Phase(COLD_PHASE, null, Collections.emptyMap()); - Phase frozenPhase = new Phase(FROZEN_PHASE, TimeValue.timeValueDays(2), Collections.emptyMap()); - Phase deletePhase = new Phase(DELETE_PHASE, TimeValue.timeValueDays(1), Collections.emptyMap()); + Phase hotPhase = new Phase(HOT_PHASE, TimeValue.timeValueDays(1), Map.of()); + Phase warmPhase = new Phase(WARM_PHASE, TimeValue.timeValueDays(3), Map.of()); + Phase coldPhase = new Phase(COLD_PHASE, null, Map.of()); + Phase frozenPhase = new Phase(FROZEN_PHASE, TimeValue.timeValueDays(2), Map.of()); + Phase deletePhase = new Phase(DELETE_PHASE, TimeValue.timeValueDays(1), Map.of()); - String err = validateMonotonicallyIncreasingPhaseTimings( - Arrays.asList(hotPhase, warmPhase, coldPhase, frozenPhase, deletePhase) - ); + String err = validateMonotonicallyIncreasingPhaseTimings(List.of(hotPhase, warmPhase, coldPhase, frozenPhase, deletePhase)); assertThat( err, @@ -750,15 +736,13 @@ public void testValidatingIncreasingAges() { } { - Phase hotPhase = new Phase(HOT_PHASE, TimeValue.timeValueDays(3), Collections.emptyMap()); - Phase warmPhase = new Phase(WARM_PHASE, TimeValue.timeValueDays(2), Collections.emptyMap()); - Phase coldPhase = new Phase(COLD_PHASE, null, Collections.emptyMap()); - Phase frozenPhase = new Phase(FROZEN_PHASE, TimeValue.timeValueDays(2), Collections.emptyMap()); - Phase deletePhase = new Phase(DELETE_PHASE, TimeValue.timeValueDays(1), Collections.emptyMap()); + Phase hotPhase = new Phase(HOT_PHASE, TimeValue.timeValueDays(3), Map.of()); + Phase warmPhase = new Phase(WARM_PHASE, TimeValue.timeValueDays(2), Map.of()); + Phase coldPhase = new Phase(COLD_PHASE, null, Map.of()); + Phase frozenPhase = new Phase(FROZEN_PHASE, TimeValue.timeValueDays(2), Map.of()); + Phase deletePhase = new Phase(DELETE_PHASE, 
TimeValue.timeValueDays(1), Map.of()); - String err = validateMonotonicallyIncreasingPhaseTimings( - Arrays.asList(hotPhase, warmPhase, coldPhase, frozenPhase, deletePhase) - ); + String err = validateMonotonicallyIncreasingPhaseTimings(List.of(hotPhase, warmPhase, coldPhase, frozenPhase, deletePhase)); assertThat( err, @@ -772,15 +756,13 @@ public void testValidatingIncreasingAges() { } { - Phase hotPhase = new Phase(HOT_PHASE, TimeValue.timeValueDays(3), Collections.emptyMap()); - Phase warmPhase = new Phase(WARM_PHASE, TimeValue.timeValueDays(2), Collections.emptyMap()); - Phase coldPhase = new Phase(COLD_PHASE, TimeValue.timeValueDays(2), Collections.emptyMap()); - Phase frozenPhase = new Phase(FROZEN_PHASE, TimeValue.timeValueDays(2), Collections.emptyMap()); - Phase deletePhase = new Phase(DELETE_PHASE, TimeValue.timeValueDays(1), Collections.emptyMap()); + Phase hotPhase = new Phase(HOT_PHASE, TimeValue.timeValueDays(3), Map.of()); + Phase warmPhase = new Phase(WARM_PHASE, TimeValue.timeValueDays(2), Map.of()); + Phase coldPhase = new Phase(COLD_PHASE, TimeValue.timeValueDays(2), Map.of()); + Phase frozenPhase = new Phase(FROZEN_PHASE, TimeValue.timeValueDays(2), Map.of()); + Phase deletePhase = new Phase(DELETE_PHASE, TimeValue.timeValueDays(1), Map.of()); - String err = validateMonotonicallyIncreasingPhaseTimings( - Arrays.asList(hotPhase, warmPhase, coldPhase, frozenPhase, deletePhase) - ); + String err = validateMonotonicallyIncreasingPhaseTimings(List.of(hotPhase, warmPhase, coldPhase, frozenPhase, deletePhase)); assertThat( err, @@ -799,7 +781,7 @@ public void testValidateFrozenPhaseHasSearchableSnapshot() { Map frozenActions = new HashMap<>(); frozenActions.put(SearchableSnapshotAction.NAME, new SearchableSnapshotAction("repo1", randomBoolean())); Phase frozenPhase = new Phase(FROZEN_PHASE, TimeValue.ZERO, frozenActions); - validateFrozenPhaseHasSearchableSnapshotAction(Collections.singleton(frozenPhase)); + validateFrozenPhaseHasSearchableSnapshotAction(List.of(frozenPhase)); } { @@ -807,7 +789,7 @@ public void testValidateFrozenPhaseHasSearchableSnapshot() { Phase frozenPhase = new Phase(FROZEN_PHASE, TimeValue.ZERO, frozenActions); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> validateFrozenPhaseHasSearchableSnapshotAction(Collections.singleton(frozenPhase)) + () -> validateFrozenPhaseHasSearchableSnapshotAction(List.of(frozenPhase)) ); assertThat( e.getMessage(), diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/UnfollowFollowerIndexStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/UnfollowFollowerIndexStepTests.java index 71f7ea2925f16..8e40d3af86d81 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/UnfollowFollowerIndexStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/UnfollowFollowerIndexStepTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.xpack.core.ccr.action.UnfollowAction; import org.mockito.Mockito; -import java.util.Collections; +import java.util.Map; import static org.elasticsearch.xpack.core.ilm.UnfollowAction.CCR_METADATA_KEY; import static org.hamcrest.Matchers.equalTo; @@ -30,7 +30,7 @@ protected UnfollowFollowerIndexStep newInstance(Step.StepKey key, Step.StepKey n public void testUnFollow() throws Exception { IndexMetadata indexMetadata = IndexMetadata.builder("follower-index") .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE, "true")) - 
.putCustom(CCR_METADATA_KEY, Collections.emptyMap()) + .putCustom(CCR_METADATA_KEY, Map.of()) .numberOfShards(1) .numberOfReplicas(0) .build(); @@ -51,7 +51,7 @@ public void testUnFollow() throws Exception { public void testRequestNotAcknowledged() { IndexMetadata indexMetadata = IndexMetadata.builder("follower-index") .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE, "true")) - .putCustom(CCR_METADATA_KEY, Collections.emptyMap()) + .putCustom(CCR_METADATA_KEY, Map.of()) .numberOfShards(1) .numberOfReplicas(0) .build(); @@ -71,7 +71,7 @@ public void testRequestNotAcknowledged() { public void testUnFollowUnfollowFailed() { IndexMetadata indexMetadata = IndexMetadata.builder("follower-index") .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE, "true")) - .putCustom(CCR_METADATA_KEY, Collections.emptyMap()) + .putCustom(CCR_METADATA_KEY, Map.of()) .numberOfShards(1) .numberOfReplicas(0) .build(); @@ -93,7 +93,7 @@ public void testUnFollowUnfollowFailed() { public void testFailureToReleaseRetentionLeases() throws Exception { IndexMetadata indexMetadata = IndexMetadata.builder("follower-index") .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE, "true")) - .putCustom(CCR_METADATA_KEY, Collections.emptyMap()) + .putCustom(CCR_METADATA_KEY, Map.of()) .numberOfShards(1) .numberOfReplicas(0) .build(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/UpdateRolloverLifecycleDateStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/UpdateRolloverLifecycleDateStepTests.java index e4bcfd88737f2..3ede4d7668cd0 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/UpdateRolloverLifecycleDateStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/UpdateRolloverLifecycleDateStepTests.java @@ -17,7 +17,6 @@ import org.elasticsearch.index.IndexVersion; import org.elasticsearch.xpack.core.ilm.Step.StepKey; -import java.util.Collections; import java.util.List; import java.util.function.LongSupplier; @@ -68,7 +67,7 @@ public void testPerformAction() { .numberOfReplicas(randomIntBetween(0, 5)) .build(); IndexMetadata indexMetadata = IndexMetadata.builder(randomAlphaOfLength(10)) - .putRolloverInfo(new RolloverInfo(alias, Collections.emptyList(), rolloverTime)) + .putRolloverInfo(new RolloverInfo(alias, List.of(), rolloverTime)) .settings(settings(IndexVersion.current()).put(RolloverAction.LIFECYCLE_ROLLOVER_ALIAS, alias)) .numberOfShards(randomIntBetween(1, 5)) .numberOfReplicas(randomIntBetween(0, 5)) @@ -88,7 +87,7 @@ public void testPerformActionOnDataStream() { long rolloverTime = randomValueOtherThan(creationDate, () -> randomNonNegativeLong()); String dataStreamName = "test-datastream"; IndexMetadata originalIndexMeta = IndexMetadata.builder(DataStream.getDefaultBackingIndexName(dataStreamName, 1)) - .putRolloverInfo(new RolloverInfo(dataStreamName, Collections.emptyList(), rolloverTime)) + .putRolloverInfo(new RolloverInfo(dataStreamName, List.of(), rolloverTime)) .settings(settings(IndexVersion.current())) .numberOfShards(randomIntBetween(1, 5)) .numberOfReplicas(randomIntBetween(0, 5)) diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForDataTierStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForDataTierStepTests.java index 00012575ea5de..2635e14b52eb4 100644 --- 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForDataTierStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForDataTierStepTests.java @@ -64,9 +64,7 @@ protected WaitForDataTierStep copyInstance(WaitForDataTierStep instance) { public void testConditionMet() { String notIncludedTier = randomFrom(DataTier.ALL_DATA_TIERS); - List otherTiers = DataTier.ALL_DATA_TIERS.stream() - .filter(tier -> notIncludedTier.equals(tier) == false) - .collect(Collectors.toList()); + List otherTiers = DataTier.ALL_DATA_TIERS.stream().filter(tier -> notIncludedTier.equals(tier) == false).toList(); List includedTiers = randomSubsetOf(between(1, otherTiers.size()), otherTiers); String tierPreference = String.join(",", includedTiers); WaitForDataTierStep step = new WaitForDataTierStep(randomStepKey(), randomStepKey(), tierPreference); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForFollowShardTasksStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForFollowShardTasksStepTests.java index 4ac5511a247c9..ba94508667776 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForFollowShardTasksStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForFollowShardTasksStepTests.java @@ -16,9 +16,9 @@ import org.elasticsearch.xpack.core.ilm.Step.StepKey; import org.mockito.Mockito; -import java.util.Arrays; import java.util.Collections; import java.util.List; +import java.util.Map; import static org.elasticsearch.xpack.core.ilm.UnfollowAction.CCR_METADATA_KEY; import static org.hamcrest.Matchers.equalTo; @@ -57,11 +57,11 @@ protected WaitForFollowShardTasksStep copyInstance(WaitForFollowShardTasksStep i public void testConditionMet() { IndexMetadata indexMetadata = IndexMetadata.builder("follower-index") .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE, "true")) - .putCustom(CCR_METADATA_KEY, Collections.emptyMap()) + .putCustom(CCR_METADATA_KEY, Map.of()) .numberOfShards(2) .numberOfReplicas(0) .build(); - List statsResponses = Arrays.asList( + List statsResponses = List.of( new FollowStatsAction.StatsResponse(createShardFollowTaskStatus(0, 9, 9)), new FollowStatsAction.StatsResponse(createShardFollowTaskStatus(1, 3, 3)) ); @@ -96,11 +96,11 @@ public void onFailure(Exception e) { public void testConditionNotMetShardsNotInSync() { IndexMetadata indexMetadata = IndexMetadata.builder("follower-index") .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE, "true")) - .putCustom(CCR_METADATA_KEY, Collections.emptyMap()) + .putCustom(CCR_METADATA_KEY, Map.of()) .numberOfShards(2) .numberOfReplicas(0) .build(); - List statsResponses = Arrays.asList( + List statsResponses = List.of( new FollowStatsAction.StatsResponse(createShardFollowTaskStatus(0, 9, 9)), new FollowStatsAction.StatsResponse(createShardFollowTaskStatus(1, 8, 3)) ); @@ -214,7 +214,7 @@ private void mockFollowStatsCall(String expectedIndexName, List listener = (ActionListener) invocationOnMock .getArguments()[2]; - listener.onResponse(new FollowStatsAction.StatsResponses(Collections.emptyList(), Collections.emptyList(), statsResponses)); + listener.onResponse(new FollowStatsAction.StatsResponses(List.of(), List.of(), statsResponses)); return null; }).when(client).execute(Mockito.eq(FollowStatsAction.INSTANCE), Mockito.any(), Mockito.any()); } diff --git 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForIndexingCompleteStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForIndexingCompleteStepTests.java index 2f91393b451d7..a0982e72b11af 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForIndexingCompleteStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForIndexingCompleteStepTests.java @@ -15,7 +15,7 @@ import org.elasticsearch.index.IndexVersion; import org.elasticsearch.xpack.core.ilm.Step.StepKey; -import java.util.Collections; +import java.util.Map; import static org.elasticsearch.xpack.core.ilm.UnfollowAction.CCR_METADATA_KEY; import static org.hamcrest.Matchers.equalTo; @@ -54,7 +54,7 @@ protected WaitForIndexingCompleteStep copyInstance(WaitForIndexingCompleteStep i public void testConditionMet() { IndexMetadata indexMetadata = IndexMetadata.builder("follower-index") .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE, "true")) - .putCustom(CCR_METADATA_KEY, Collections.emptyMap()) + .putCustom(CCR_METADATA_KEY, Map.of()) .numberOfShards(1) .numberOfReplicas(0) .build(); @@ -93,7 +93,7 @@ public void testConditionNotMet() { } IndexMetadata indexMetadata = IndexMetadata.builder("follower-index") .settings(indexSettings) - .putCustom(CCR_METADATA_KEY, Collections.emptyMap()) + .putCustom(CCR_METADATA_KEY, Map.of()) .numberOfShards(1) .numberOfReplicas(0) .build(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForRolloverReadyStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForRolloverReadyStepTests.java index 0264f7b09c6fd..db0c2957b3ccb 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForRolloverReadyStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForRolloverReadyStepTests.java @@ -38,7 +38,6 @@ import org.mockito.ArgumentCaptor; import org.mockito.Mockito; -import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Locale; @@ -396,11 +395,7 @@ public void testEvaluateDoesntTriggerRolloverForIndexManuallyRolledOnLifecycleRo .putAlias(AliasMetadata.builder(rolloverAlias)) .settings(settings(IndexVersion.current()).put(RolloverAction.LIFECYCLE_ROLLOVER_ALIAS, rolloverAlias)) .putRolloverInfo( - new RolloverInfo( - rolloverAlias, - Collections.singletonList(new MaxSizeCondition(ByteSizeValue.ofBytes(2L))), - System.currentTimeMillis() - ) + new RolloverInfo(rolloverAlias, List.of(new MaxSizeCondition(ByteSizeValue.ofBytes(2L))), System.currentTimeMillis()) ) .numberOfShards(randomIntBetween(1, 5)) .numberOfReplicas(randomIntBetween(0, 5)) @@ -432,7 +427,7 @@ public void testEvaluateTriggersRolloverForIndexManuallyRolledOnDifferentAlias() .putRolloverInfo( new RolloverInfo( randomAlphaOfLength(5), - Collections.singletonList(new MaxSizeCondition(ByteSizeValue.ofBytes(2L))), + List.of(new MaxSizeCondition(ByteSizeValue.ofBytes(2L))), System.currentTimeMillis() ) ) diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/action/GetLifecycleResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/action/GetLifecycleResponseTests.java index 05c637a3a66c9..1dc8b24c3231d 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/action/GetLifecycleResponseTests.java +++ 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/action/GetLifecycleResponseTests.java @@ -24,7 +24,6 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; import java.util.List; import java.util.Map; @@ -90,7 +89,7 @@ protected Writeable.Reader<Response> instanceReader() { protected NamedWriteableRegistry getNamedWriteableRegistry() { return new NamedWriteableRegistry( - Arrays.asList( + List.of( new NamedWriteableRegistry.Entry(LifecycleAction.class, MockAction.NAME, MockAction::new), new NamedWriteableRegistry.Entry(LifecycleType.class, TestLifecycleType.TYPE, in -> TestLifecycleType.INSTANCE) ) diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/action/PutLifecycleRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/action/PutLifecycleRequestTests.java index feb5ca24a021d..b87a4e41258b8 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/action/PutLifecycleRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/action/PutLifecycleRequestTests.java @@ -34,7 +34,6 @@ import org.junit.Before; import java.util.ArrayList; -import java.util.Arrays; import java.util.List; public class PutLifecycleRequestTests extends AbstractXContentSerializingTestCase<PutLifecycleRequest> { @@ -78,7 +77,7 @@ public String getPolicyName() { @Override protected NamedWriteableRegistry getNamedWriteableRegistry() { return new NamedWriteableRegistry( - Arrays.asList( + List.of( new NamedWriteableRegistry.Entry( LifecycleType.class, TimeseriesLifecycleType.TYPE, @@ -105,7 +104,7 @@ protected NamedXContentRegistry xContentRegistry() { List<NamedXContentRegistry.Entry> entries = new ArrayList<>(ClusterModule.getNamedXWriteables()); entries.addAll( - Arrays.asList( + List.of( new NamedXContentRegistry.Entry( LifecycleType.class, new ParseField(TimeseriesLifecycleType.TYPE), diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/action/RemoveIndexLifecyclePolicyResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/action/RemoveIndexLifecyclePolicyResponseTests.java index 76f4d732f4ae7..44fed3d4b488b 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/action/RemoveIndexLifecyclePolicyResponseTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/action/RemoveIndexLifecyclePolicyResponseTests.java @@ -14,15 +14,13 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; import java.util.List; public class RemoveIndexLifecyclePolicyResponseTests extends AbstractXContentSerializingTestCase<Response> { @Override protected Response createTestInstance() { - List<String> failedIndexes = Arrays.asList(generateRandomStringArray(20, 20, false)); + List<String> failedIndexes = List.of(generateRandomStringArray(20, 20, false)); return new Response(failedIndexes); } @@ -35,7 +33,7 @@ protected Writeable.Reader<Response> instanceReader() { protected Response mutateInstance(Response instance) { List<String> failedIndices = randomValueOtherThan( instance.getFailedIndexes(), - () -> Arrays.asList(generateRandomStringArray(20, 20, false)) + () -> List.of(generateRandomStringArray(20, 20, false)) ); return new Response(failedIndices); } @@ -53,7 +51,7 @@ public void testNullFailedIndices() { public void testHasFailures() { Response response = new Response(new ArrayList<>()); assertFalse(response.hasFailures()); -
assertEquals(Collections.emptyList(), response.getFailedIndexes()); + assertEquals(List.of(), response.getFailedIndexes()); int size = randomIntBetween(1, 10); List failedIndexes = new ArrayList<>(size); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java index 9663e41a647a8..bfac286bc3c35 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java @@ -614,7 +614,8 @@ public void testGetConfigurationByContextName() throws Exception { "xpack.security.authc.realms.ldap.realm1.ssl", "xpack.security.authc.realms.saml.realm2.ssl", "xpack.monitoring.exporters.mon1.ssl", - "xpack.monitoring.exporters.mon2.ssl" }; + "xpack.monitoring.exporters.mon2.ssl", + "xpack.inference.elastic.http.ssl" }; assumeTrue("Not enough cipher suites are available to support this test", getCipherSuites.length >= contextNames.length); diff --git a/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/MlDeprecationIT.java b/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/MlDeprecationIT.java index 6d95038e2cbcc..54a48ab34e991 100644 --- a/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/MlDeprecationIT.java +++ b/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/MlDeprecationIT.java @@ -63,7 +63,7 @@ public void testMlDeprecationChecks() throws Exception { indexDoc( ".ml-anomalies-.write-" + jobId, jobId + "_model_snapshot_2", - "{\"job_id\":\"deprecation_check_job\",\"snapshot_id\":\"2\",\"snapshot_doc_count\":1,\"min_version\":\"8.0.0\"}" + "{\"job_id\":\"deprecation_check_job\",\"snapshot_id\":\"2\",\"snapshot_doc_count\":1,\"min_version\":\"8.3.0\"}" ); client().performRequest(new Request("POST", "/.ml-anomalies-*/_refresh")); diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java index 87d0bfb93e18c..7ad0758d99832 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java @@ -366,7 +366,7 @@ private static ClusterState removeSkippedSettings(ClusterState state, String[] i public static class Request extends MasterNodeReadRequest implements IndicesRequest.Replaceable { - private static final IndicesOptions INDICES_OPTIONS = IndicesOptions.fromOptions(false, true, true, true); + private static final IndicesOptions INDICES_OPTIONS = IndicesOptions.fromOptions(false, true, true, true, true); private String[] indices; public Request(TimeValue masterNodeTimeout, String... 
indices) { diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/MlDeprecationChecker.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/MlDeprecationChecker.java index c0e1c054f7a13..88adfe5157418 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/MlDeprecationChecker.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/MlDeprecationChecker.java @@ -26,8 +26,7 @@ import java.util.Map; import java.util.Optional; -import static org.elasticsearch.xpack.core.ml.MachineLearningField.MIN_CHECKED_SUPPORTED_SNAPSHOT_VERSION; -import static org.elasticsearch.xpack.core.ml.MachineLearningField.MIN_REPORTED_SUPPORTED_SNAPSHOT_VERSION; +import static org.elasticsearch.xpack.core.ml.MachineLearningField.MIN_SUPPORTED_SNAPSHOT_VERSION; public class MlDeprecationChecker implements DeprecationChecker { @@ -69,13 +68,13 @@ static Optional checkDataFeedAggregations(DatafeedConfig dataf } static Optional checkModelSnapshot(ModelSnapshot modelSnapshot) { - if (modelSnapshot.getMinVersion().before(MIN_CHECKED_SUPPORTED_SNAPSHOT_VERSION)) { + if (modelSnapshot.getMinVersion().before(MIN_SUPPORTED_SNAPSHOT_VERSION)) { StringBuilder details = new StringBuilder( String.format( Locale.ROOT, "Delete model snapshot [%s] or update it to %s or greater.", modelSnapshot.getSnapshotId(), - MIN_REPORTED_SUPPORTED_SNAPSHOT_VERSION + MIN_SUPPORTED_SNAPSHOT_VERSION ) ); if (modelSnapshot.getLatestRecordTimeStamp() != null) { diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/10_connector_put.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/10_connector_put.yml index b0f850d09f76d..094d9cbf43089 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/10_connector_put.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/10_connector_put.yml @@ -58,7 +58,7 @@ setup: connector.put: connector_id: test-connector-native body: - index_name: search-test + index_name: content-search-test is_native: true - match: { result: 'created' } @@ -68,7 +68,7 @@ setup: connector_id: test-connector-native - match: { id: test-connector-native } - - match: { index_name: search-test } + - match: { index_name: content-search-test } - match: { is_native: true } - match: { sync_now: false } - match: { status: needs_configuration } @@ -151,6 +151,7 @@ setup: is_native: false service_type: super-connector + --- 'Create Connector - Id returned as part of response': - do: diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/130_connector_update_index_name.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/130_connector_update_index_name.yml index 4ffa5435a3d7b..f804dc02a9e01 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/130_connector_update_index_name.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/130_connector_update_index_name.yml @@ -125,3 +125,29 @@ setup: connector_id: test-connector - match: { index_name: search-1-test } + + +--- +"Update Managed Connector Index Name": + - do: + connector.put: + connector_id: test-connector-1 + body: + is_native: true + 
service_type: super-connector + + - do: + connector.update_index_name: + connector_id: test-connector-1 + body: + index_name: content-search-2-test + + + - match: { result: updated } + + - do: + connector.get: + connector_id: test-connector-1 + + - match: { index_name: content-search-2-test } + diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/140_connector_update_native.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/140_connector_update_native.yml index 77c57532ad479..f8cd24d175312 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/140_connector_update_native.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/140_connector_update_native.yml @@ -7,7 +7,7 @@ setup: connector.put: connector_id: test-connector body: - index_name: search-1-test + index_name: content-search-1-test name: my-connector language: pl is_native: false @@ -29,7 +29,6 @@ setup: connector_id: test-connector - match: { is_native: true } - - match: { status: configured } - do: connector.update_native: @@ -44,7 +43,6 @@ setup: connector_id: test-connector - match: { is_native: false } - - match: { status: configured } --- "Update Connector Native - 404 when connector doesn't exist": diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/15_connector_post.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/15_connector_post.yml index 1cbff6a35e18b..634f99cd53fde 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/15_connector_post.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/15_connector_post.yml @@ -71,7 +71,7 @@ setup: - do: connector.post: body: - index_name: search-test + index_name: content-search-test is_native: true - set: { id: id } @@ -82,7 +82,7 @@ setup: connector_id: $id - match: { id: $id } - - match: { index_name: search-test } + - match: { index_name: content-search-test } - match: { is_native: true } - match: { sync_now: false } - match: { status: needs_configuration } @@ -102,6 +102,7 @@ setup: is_native: false service_type: super-connector + --- 'Create Connector - Index name used by another connector': - do: diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/20_connector_list.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/20_connector_list.yml index 10e4620ca5603..697b0ee419181 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/20_connector_list.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/20_connector_list.yml @@ -26,7 +26,7 @@ setup: connector.put: connector_id: connector-b body: - index_name: search-2-test + index_name: content-search-2-test name: my-connector-2 language: en is_native: true @@ -40,13 +40,13 @@ setup: - match: { count: 3 } # Alphabetical order by index_name for results - - match: { results.0.id: "connector-a" } - - match: { results.0.index_name: "search-1-test" } - - match: { results.0.language: "pl" } + - match: { results.0.id: "connector-b" } + - match: { results.0.index_name: 
"content-search-2-test" } + - match: { results.0.language: "en" } - - match: { results.1.id: "connector-b" } - - match: { results.1.index_name: "search-2-test" } - - match: { results.1.language: "en" } + - match: { results.1.id: "connector-a" } + - match: { results.1.index_name: "search-1-test" } + - match: { results.1.language: "pl" } - match: { results.2.id: "connector-c" } - match: { results.2.index_name: "search-3-test" } @@ -62,9 +62,9 @@ setup: - match: { count: 3 } # Alphabetical order by index_name for results - - match: { results.0.id: "connector-b" } - - match: { results.0.index_name: "search-2-test" } - - match: { results.0.language: "en" } + - match: { results.0.id: "connector-a" } + - match: { results.0.index_name: "search-1-test" } + - match: { results.0.language: "pl" } - match: { results.1.id: "connector-c" } - match: { results.1.index_name: "search-3-test" } @@ -79,13 +79,13 @@ setup: - match: { count: 3 } # Alphabetical order by index_name for results - - match: { results.0.id: "connector-a" } - - match: { results.0.index_name: "search-1-test" } - - match: { results.0.language: "pl" } + - match: { results.0.id: "connector-b" } + - match: { results.0.index_name: "content-search-2-test" } + - match: { results.0.language: "en" } - - match: { results.1.id: "connector-b" } - - match: { results.1.index_name: "search-2-test" } - - match: { results.1.language: "en" } + - match: { results.1.id: "connector-a" } + - match: { results.1.index_name: "search-1-test" } + - match: { results.1.language: "pl" } --- "List Connector - empty": @@ -118,11 +118,11 @@ setup: - do: connector.list: - index_name: search-1-test,search-2-test + index_name: search-1-test,content-search-2-test - match: { count: 2 } - - match: { results.0.index_name: "search-1-test" } - - match: { results.1.index_name: "search-2-test" } + - match: { results.0.index_name: "content-search-2-test" } + - match: { results.1.index_name: "search-1-test" } --- @@ -147,8 +147,8 @@ setup: connector_name: my-connector-1,my-connector-2 - match: { count: 2 } - - match: { results.0.name: "my-connector-1" } - - match: { results.1.name: "my-connector-2" } + - match: { results.0.name: "my-connector-2" } + - match: { results.1.name: "my-connector-1" } --- @@ -156,10 +156,10 @@ setup: - do: connector.list: connector_name: my-connector-1,my-connector-2 - index_name: search-2-test + index_name: content-search-2-test - match: { count: 1 } - - match: { results.0.index_name: "search-2-test" } + - match: { results.0.index_name: "content-search-2-test" } - match: { results.0.name: "my-connector-2" } @@ -230,13 +230,13 @@ setup: - match: { count: 3 } # Alphabetical order by index_name for results - - match: { results.0.id: "connector-a" } - - match: { results.0.index_name: "search-1-test" } - - match: { results.0.language: "pl" } + - match: { results.0.id: "connector-b" } + - match: { results.0.index_name: "content-search-2-test" } + - match: { results.0.language: "en" } - - match: { results.1.id: "connector-b" } - - match: { results.1.index_name: "search-2-test" } - - match: { results.1.language: "en" } + - match: { results.1.id: "connector-a" } + - match: { results.1.index_name: "search-1-test" } + - match: { results.1.language: "pl" } - match: { results.2.id: "connector-c" } - match: { results.2.index_name: "search-3-test" } @@ -255,13 +255,13 @@ setup: - match: { count: 3 } # Alphabetical order by index_name for results - - match: { results.0.id: "connector-a" } - - match: { results.0.index_name: "search-1-test" } - - match: { 
results.0.language: "pl" } + - match: { results.0.id: "connector-b" } + - match: { results.0.index_name: "content-search-2-test" } + - match: { results.0.language: "en" } - - match: { results.1.id: "connector-b" } - - match: { results.1.index_name: "search-2-test" } - - match: { results.1.language: "en" } + - match: { results.1.id: "connector-a" } + - match: { results.1.index_name: "search-1-test" } + - match: { results.1.language: "pl" } - match: { results.2.id: "connector-c" } - match: { results.2.index_name: "search-3-test" } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TranslationAware.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TranslationAware.java new file mode 100644 index 0000000000000..b1ac2b36314fa --- /dev/null +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TranslationAware.java @@ -0,0 +1,20 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.core.expression; + +import org.elasticsearch.xpack.esql.core.planner.TranslatorHandler; +import org.elasticsearch.xpack.esql.core.querydsl.query.Query; + +/** + * Expressions can implement this interface to control how they would be translated and pushed down as Lucene queries. + * When an expression implements {@link TranslationAware}, we call {@link #asQuery(TranslatorHandler)} to get the + * {@link Query} translation, instead of relying on the registered translators from EsqlExpressionTranslators. + */ +public interface TranslationAware { + Query asQuery(TranslatorHandler translatorHandler); +} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/TranslationAwareExpressionQuery.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/TranslationAwareExpressionQuery.java new file mode 100644 index 0000000000000..92a42d3053b68 --- /dev/null +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/TranslationAwareExpressionQuery.java @@ -0,0 +1,35 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.core.querydsl.query; + +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * Expressions that store their own {@link QueryBuilder} and implement + * {@link org.elasticsearch.xpack.esql.core.expression.TranslationAware} can use {@link TranslationAwareExpressionQuery} + * to wrap their {@link QueryBuilder}, instead of using the other existing {@link Query} implementations. 
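+ *
+ * <p>A minimal, illustrative sketch (the enclosing expression class and its
+ * {@code queryBuilder} field are hypothetical, not part of this change):
+ * <pre>{@code
+ * // Inside a hypothetical TranslationAware expression that already holds a QueryBuilder:
+ * public Query asQuery(TranslatorHandler handler) {
+ *     // Wrap the prebuilt QueryBuilder instead of relying on the registered translators.
+ *     return new TranslationAwareExpressionQuery(source(), queryBuilder);
+ * }
+ * }</pre>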
+ */ +public class TranslationAwareExpressionQuery extends Query { + private final QueryBuilder queryBuilder; + + public TranslationAwareExpressionQuery(Source source, QueryBuilder queryBuilder) { + super(source); + this.queryBuilder = queryBuilder; + } + + @Override + public QueryBuilder asBuilder() { + return queryBuilder; + } + + @Override + protected String innerToString() { + return queryBuilder.toString(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesAggregatorFunction.java new file mode 100644 index 0000000000000..21306036fbf50 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesAggregatorFunction.java @@ -0,0 +1,187 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.aggregation.AggregatorFunction; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link SpatialExtentCartesianPointDocValuesAggregator}. + * This class is generated. Do not edit it. 
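+ *
+ * <p>The intermediate representation is the four {@code INT} columns named in
+ * {@code INTERMEDIATE_STATE_DESC} ({@code minX}, {@code maxX}, {@code maxY}, {@code minY}),
+ * i.e. one encoded bounding box per aggregation. A hedged round-trip sketch
+ * ({@code fn} and {@code driverContext} are assumed to be in scope):
+ * <pre>{@code
+ * Block[] intermediate = new Block[fn.intermediateBlockCount()]; // 4 blocks
+ * fn.evaluateIntermediate(intermediate, 0, driverContext);       // writes minX, maxX, maxY, minY
+ * // a downstream function later consumes the same four columns via addIntermediateInput(page)
+ * }</pre>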
+ */ +public final class SpatialExtentCartesianPointDocValuesAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("minX", ElementType.INT), + new IntermediateStateDesc("maxX", ElementType.INT), + new IntermediateStateDesc("maxY", ElementType.INT), + new IntermediateStateDesc("minY", ElementType.INT) ); + + private final DriverContext driverContext; + + private final SpatialExtentState state; + + private final List channels; + + public SpatialExtentCartesianPointDocValuesAggregatorFunction(DriverContext driverContext, + List channels, SpatialExtentState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static SpatialExtentCartesianPointDocValuesAggregatorFunction create( + DriverContext driverContext, List channels) { + return new SpatialExtentCartesianPointDocValuesAggregatorFunction(driverContext, channels, SpatialExtentCartesianPointDocValuesAggregator.initSingle()); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page, BooleanVector mask) { + if (mask.allFalse()) { + // Entire page masked away + return; + } + if (mask.allTrue()) { + // No masking + LongBlock block = page.getBlock(channels.get(0)); + LongVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept + LongBlock block = page.getBlock(channels.get(0)); + LongVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector, mask); + } else { + addRawBlock(block, mask); + } + } + + private void addRawVector(LongVector vector) { + for (int i = 0; i < vector.getPositionCount(); i++) { + SpatialExtentCartesianPointDocValuesAggregator.combine(state, vector.getLong(i)); + } + } + + private void addRawVector(LongVector vector, BooleanVector mask) { + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + SpatialExtentCartesianPointDocValuesAggregator.combine(state, vector.getLong(i)); + } + } + + private void addRawBlock(LongBlock block) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + SpatialExtentCartesianPointDocValuesAggregator.combine(state, block.getLong(i)); + } + } + } + + private void addRawBlock(LongBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + SpatialExtentCartesianPointDocValuesAggregator.combine(state, block.getLong(i)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + Block minXUncast = page.getBlock(channels.get(0)); + if (minXUncast.areAllValuesNull()) { + return; + } + IntVector minX = ((IntBlock) minXUncast).asVector(); + assert minX.getPositionCount() == 1; + Block maxXUncast = 
page.getBlock(channels.get(1)); + if (maxXUncast.areAllValuesNull()) { + return; + } + IntVector maxX = ((IntBlock) maxXUncast).asVector(); + assert maxX.getPositionCount() == 1; + Block maxYUncast = page.getBlock(channels.get(2)); + if (maxYUncast.areAllValuesNull()) { + return; + } + IntVector maxY = ((IntBlock) maxYUncast).asVector(); + assert maxY.getPositionCount() == 1; + Block minYUncast = page.getBlock(channels.get(3)); + if (minYUncast.areAllValuesNull()) { + return; + } + IntVector minY = ((IntBlock) minYUncast).asVector(); + assert minY.getPositionCount() == 1; + SpatialExtentCartesianPointDocValuesAggregator.combineIntermediate(state, minX.getInt(0), maxX.getInt(0), maxY.getInt(0), minY.getInt(0)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + state.toIntermediate(blocks, offset, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = SpatialExtentCartesianPointDocValuesAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..751ea3b4c4a9d --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesAggregatorFunctionSupplier.java @@ -0,0 +1,41 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link SpatialExtentCartesianPointDocValuesAggregator}. + * This class is generated. Do not edit it. 
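+ *
+ * <p>Usage sketch (hedged; {@code driverContext} and the input channel are assumed to be
+ * provided by the surrounding compute engine):
+ * <pre>{@code
+ * // Channel 0 carries the encoded cartesian point doc-values.
+ * var supplier = new SpatialExtentCartesianPointDocValuesAggregatorFunctionSupplier(List.of(0));
+ * AggregatorFunction fn = supplier.aggregator(driverContext);                        // non-grouping
+ * GroupingAggregatorFunction grouped = supplier.groupingAggregator(driverContext);   // grouping
+ * }</pre>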
+ */ +public final class SpatialExtentCartesianPointDocValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final List channels; + + public SpatialExtentCartesianPointDocValuesAggregatorFunctionSupplier(List channels) { + this.channels = channels; + } + + @Override + public SpatialExtentCartesianPointDocValuesAggregatorFunction aggregator( + DriverContext driverContext) { + return SpatialExtentCartesianPointDocValuesAggregatorFunction.create(driverContext, channels); + } + + @Override + public SpatialExtentCartesianPointDocValuesGroupingAggregatorFunction groupingAggregator( + DriverContext driverContext) { + return SpatialExtentCartesianPointDocValuesGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return "spatial_extent_cartesian_point_doc of valuess"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..a5191e57959b8 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesGroupingAggregatorFunction.java @@ -0,0 +1,230 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; +import org.elasticsearch.compute.aggregation.SeenGroupIds; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link SpatialExtentCartesianPointDocValuesAggregator}. + * This class is generated. Do not edit it. 
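+ *
+ * <p>{@code prepareProcessPage} returns an {@code AddInput} that routes each page to the
+ * vector path (dense, null-free input) or the block path (sparse or multi-valued input).
+ * A hedged sketch of the calling pattern ({@code seenGroupIds}, {@code page} and
+ * {@code groupIdVector} are assumed to come from the driver and the hashing operator):
+ * <pre>{@code
+ * GroupingAggregatorFunction.AddInput in = fn.prepareProcessPage(seenGroupIds, page);
+ * in.add(0, groupIdVector); // combine() runs once per (group, value) pair
+ * in.close();
+ * }</pre>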
+ */ +public final class SpatialExtentCartesianPointDocValuesGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("minX", ElementType.INT), + new IntermediateStateDesc("maxX", ElementType.INT), + new IntermediateStateDesc("maxY", ElementType.INT), + new IntermediateStateDesc("minY", ElementType.INT) ); + + private final SpatialExtentGroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + public SpatialExtentCartesianPointDocValuesGroupingAggregatorFunction(List channels, + SpatialExtentGroupingState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static SpatialExtentCartesianPointDocValuesGroupingAggregatorFunction create( + List channels, DriverContext driverContext) { + return new SpatialExtentCartesianPointDocValuesGroupingAggregatorFunction(channels, SpatialExtentCartesianPointDocValuesAggregator.initGrouping(), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + LongBlock valuesBlock = page.getBlock(channels.get(0)); + LongVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void close() { + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void close() { + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, LongBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + SpatialExtentCartesianPointDocValuesAggregator.combine(state, groupId, values.getLong(v)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, LongVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + SpatialExtentCartesianPointDocValuesAggregator.combine(state, groupId, values.getLong(groupPosition + positionOffset)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, LongBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + 
continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + SpatialExtentCartesianPointDocValuesAggregator.combine(state, groupId, values.getLong(v)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, LongVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + SpatialExtentCartesianPointDocValuesAggregator.combine(state, groupId, values.getLong(groupPosition + positionOffset)); + } + } + } + + @Override + public void selectedMayContainUnseenGroups(SeenGroupIds seenGroupIds) { + state.enableGroupIdTracking(seenGroupIds); + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block minXUncast = page.getBlock(channels.get(0)); + if (minXUncast.areAllValuesNull()) { + return; + } + IntVector minX = ((IntBlock) minXUncast).asVector(); + Block maxXUncast = page.getBlock(channels.get(1)); + if (maxXUncast.areAllValuesNull()) { + return; + } + IntVector maxX = ((IntBlock) maxXUncast).asVector(); + Block maxYUncast = page.getBlock(channels.get(2)); + if (maxYUncast.areAllValuesNull()) { + return; + } + IntVector maxY = ((IntBlock) maxYUncast).asVector(); + Block minYUncast = page.getBlock(channels.get(3)); + if (minYUncast.areAllValuesNull()) { + return; + } + IntVector minY = ((IntBlock) minYUncast).asVector(); + assert minX.getPositionCount() == maxX.getPositionCount() && minX.getPositionCount() == maxY.getPositionCount() && minX.getPositionCount() == minY.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + SpatialExtentCartesianPointDocValuesAggregator.combineIntermediate(state, groupId, minX.getInt(groupPosition + positionOffset), maxX.getInt(groupPosition + positionOffset), maxY.getInt(groupPosition + positionOffset), minY.getInt(groupPosition + positionOffset)); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + SpatialExtentGroupingState inState = ((SpatialExtentCartesianPointDocValuesGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + SpatialExtentCartesianPointDocValuesAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector 
selected, + DriverContext driverContext) { + blocks[offset] = SpatialExtentCartesianPointDocValuesAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesAggregatorFunction.java new file mode 100644 index 0000000000000..6610168e1df21 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesAggregatorFunction.java @@ -0,0 +1,192 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.aggregation.AggregatorFunction; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link SpatialExtentCartesianPointSourceValuesAggregator}. + * This class is generated. Do not edit it. 
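+ *
+ * <p>Unlike the doc-values variant, this implementation reads geometries as {@link BytesRef}
+ * values (the source representation) rather than as encoded longs, so the raw-input loops
+ * reuse a single scratch {@code BytesRef}. Sketch of the per-value hot loop, mirroring the
+ * generated code below:
+ * <pre>{@code
+ * BytesRef scratch = new BytesRef();
+ * for (int i = 0; i < vector.getPositionCount(); i++) {
+ *     SpatialExtentCartesianPointSourceValuesAggregator.combine(state, vector.getBytesRef(i, scratch));
+ * }
+ * }</pre>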
+ */ +public final class SpatialExtentCartesianPointSourceValuesAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("minX", ElementType.INT), + new IntermediateStateDesc("maxX", ElementType.INT), + new IntermediateStateDesc("maxY", ElementType.INT), + new IntermediateStateDesc("minY", ElementType.INT) ); + + private final DriverContext driverContext; + + private final SpatialExtentState state; + + private final List channels; + + public SpatialExtentCartesianPointSourceValuesAggregatorFunction(DriverContext driverContext, + List channels, SpatialExtentState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static SpatialExtentCartesianPointSourceValuesAggregatorFunction create( + DriverContext driverContext, List channels) { + return new SpatialExtentCartesianPointSourceValuesAggregatorFunction(driverContext, channels, SpatialExtentCartesianPointSourceValuesAggregator.initSingle()); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page, BooleanVector mask) { + if (mask.allFalse()) { + // Entire page masked away + return; + } + if (mask.allTrue()) { + // No masking + BytesRefBlock block = page.getBlock(channels.get(0)); + BytesRefVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept + BytesRefBlock block = page.getBlock(channels.get(0)); + BytesRefVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector, mask); + } else { + addRawBlock(block, mask); + } + } + + private void addRawVector(BytesRefVector vector) { + BytesRef scratch = new BytesRef(); + for (int i = 0; i < vector.getPositionCount(); i++) { + SpatialExtentCartesianPointSourceValuesAggregator.combine(state, vector.getBytesRef(i, scratch)); + } + } + + private void addRawVector(BytesRefVector vector, BooleanVector mask) { + BytesRef scratch = new BytesRef(); + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + SpatialExtentCartesianPointSourceValuesAggregator.combine(state, vector.getBytesRef(i, scratch)); + } + } + + private void addRawBlock(BytesRefBlock block) { + BytesRef scratch = new BytesRef(); + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + SpatialExtentCartesianPointSourceValuesAggregator.combine(state, block.getBytesRef(i, scratch)); + } + } + } + + private void addRawBlock(BytesRefBlock block, BooleanVector mask) { + BytesRef scratch = new BytesRef(); + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + SpatialExtentCartesianPointSourceValuesAggregator.combine(state, block.getBytesRef(i, scratch)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + 
intermediateStateDesc().size(); + Block minXUncast = page.getBlock(channels.get(0)); + if (minXUncast.areAllValuesNull()) { + return; + } + IntVector minX = ((IntBlock) minXUncast).asVector(); + assert minX.getPositionCount() == 1; + Block maxXUncast = page.getBlock(channels.get(1)); + if (maxXUncast.areAllValuesNull()) { + return; + } + IntVector maxX = ((IntBlock) maxXUncast).asVector(); + assert maxX.getPositionCount() == 1; + Block maxYUncast = page.getBlock(channels.get(2)); + if (maxYUncast.areAllValuesNull()) { + return; + } + IntVector maxY = ((IntBlock) maxYUncast).asVector(); + assert maxY.getPositionCount() == 1; + Block minYUncast = page.getBlock(channels.get(3)); + if (minYUncast.areAllValuesNull()) { + return; + } + IntVector minY = ((IntBlock) minYUncast).asVector(); + assert minY.getPositionCount() == 1; + SpatialExtentCartesianPointSourceValuesAggregator.combineIntermediate(state, minX.getInt(0), maxX.getInt(0), maxY.getInt(0), minY.getInt(0)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + state.toIntermediate(blocks, offset, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = SpatialExtentCartesianPointSourceValuesAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..7f4d1d69ae928 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesAggregatorFunctionSupplier.java @@ -0,0 +1,41 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link SpatialExtentCartesianPointSourceValuesAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialExtentCartesianPointSourceValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final List channels; + + public SpatialExtentCartesianPointSourceValuesAggregatorFunctionSupplier(List channels) { + this.channels = channels; + } + + @Override + public SpatialExtentCartesianPointSourceValuesAggregatorFunction aggregator( + DriverContext driverContext) { + return SpatialExtentCartesianPointSourceValuesAggregatorFunction.create(driverContext, channels); + } + + @Override + public SpatialExtentCartesianPointSourceValuesGroupingAggregatorFunction groupingAggregator( + DriverContext driverContext) { + return SpatialExtentCartesianPointSourceValuesGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return "spatial_extent_cartesian_point_source of valuess"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..4e06158952fc3 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesGroupingAggregatorFunction.java @@ -0,0 +1,235 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; +import org.elasticsearch.compute.aggregation.SeenGroupIds; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link SpatialExtentCartesianPointSourceValuesAggregator}. + * This class is generated. Do not edit it. 
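+ *
+ * <p>When states are merged across function instances, {@code addIntermediateRowInput}
+ * first verifies the other function is the same concrete class, then folds one position of
+ * its state into this one. Hedged sketch ({@code fn} and {@code other} are assumed to be
+ * instances of this generated class):
+ * <pre>{@code
+ * // Merge row `position` of `other` into group `groupId` of `fn`:
+ * fn.addIntermediateRowInput(groupId, other, position);
+ * }</pre>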
+ */ +public final class SpatialExtentCartesianPointSourceValuesGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("minX", ElementType.INT), + new IntermediateStateDesc("maxX", ElementType.INT), + new IntermediateStateDesc("maxY", ElementType.INT), + new IntermediateStateDesc("minY", ElementType.INT) ); + + private final SpatialExtentGroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + public SpatialExtentCartesianPointSourceValuesGroupingAggregatorFunction(List channels, + SpatialExtentGroupingState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static SpatialExtentCartesianPointSourceValuesGroupingAggregatorFunction create( + List channels, DriverContext driverContext) { + return new SpatialExtentCartesianPointSourceValuesGroupingAggregatorFunction(channels, SpatialExtentCartesianPointSourceValuesAggregator.initGrouping(), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + BytesRefBlock valuesBlock = page.getBlock(channels.get(0)); + BytesRefVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void close() { + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void close() { + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, BytesRefBlock values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + SpatialExtentCartesianPointSourceValuesAggregator.combine(state, groupId, values.getBytesRef(v, scratch)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, BytesRefVector values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + SpatialExtentCartesianPointSourceValuesAggregator.combine(state, groupId, values.getBytesRef(groupPosition + positionOffset, scratch)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, BytesRefBlock values) 
{ + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + SpatialExtentCartesianPointSourceValuesAggregator.combine(state, groupId, values.getBytesRef(v, scratch)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, BytesRefVector values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + SpatialExtentCartesianPointSourceValuesAggregator.combine(state, groupId, values.getBytesRef(groupPosition + positionOffset, scratch)); + } + } + } + + @Override + public void selectedMayContainUnseenGroups(SeenGroupIds seenGroupIds) { + state.enableGroupIdTracking(seenGroupIds); + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block minXUncast = page.getBlock(channels.get(0)); + if (minXUncast.areAllValuesNull()) { + return; + } + IntVector minX = ((IntBlock) minXUncast).asVector(); + Block maxXUncast = page.getBlock(channels.get(1)); + if (maxXUncast.areAllValuesNull()) { + return; + } + IntVector maxX = ((IntBlock) maxXUncast).asVector(); + Block maxYUncast = page.getBlock(channels.get(2)); + if (maxYUncast.areAllValuesNull()) { + return; + } + IntVector maxY = ((IntBlock) maxYUncast).asVector(); + Block minYUncast = page.getBlock(channels.get(3)); + if (minYUncast.areAllValuesNull()) { + return; + } + IntVector minY = ((IntBlock) minYUncast).asVector(); + assert minX.getPositionCount() == maxX.getPositionCount() && minX.getPositionCount() == maxY.getPositionCount() && minX.getPositionCount() == minY.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + SpatialExtentCartesianPointSourceValuesAggregator.combineIntermediate(state, groupId, minX.getInt(groupPosition + positionOffset), maxX.getInt(groupPosition + positionOffset), maxY.getInt(groupPosition + positionOffset), minY.getInt(groupPosition + positionOffset)); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + SpatialExtentGroupingState inState = ((SpatialExtentCartesianPointSourceValuesGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + SpatialExtentCartesianPointSourceValuesAggregator.combineStates(state, groupId, inState, position); + } 
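The four intermediate values above (minX, maxX, maxY, minY) describe one rectangle per group, so both combine() and combineIntermediate() reduce to component-wise min/max. A minimal, self-contained sketch of that widening logic follows; the names are illustrative only, since the real state lives in SpatialExtentGroupingState, which is not part of this diff:

// Illustrative only: component-wise min/max widening, mirroring what
// combine(state, groupId, value) and combineIntermediate(...) do per group.
final class ExtentSketch {
    private int minX = Integer.MAX_VALUE;
    private int maxX = Integer.MIN_VALUE;
    private int maxY = Integer.MIN_VALUE;
    private int minY = Integer.MAX_VALUE;

    // Adding one point widens the rectangle to cover (x, y).
    void add(int x, int y) {
        minX = Math.min(minX, x);
        maxX = Math.max(maxX, x);
        maxY = Math.max(maxY, y);
        minY = Math.min(minY, y);
    }

    // Merging a partial extent produced by another driver widens it the same way.
    void merge(int oMinX, int oMaxX, int oMaxY, int oMinY) {
        minX = Math.min(minX, oMinX);
        maxX = Math.max(maxX, oMaxX);
        maxY = Math.max(maxY, oMaxY);
        minY = Math.min(minY, oMinY);
    }

    public static void main(String[] args) {
        ExtentSketch e = new ExtentSketch();
        e.add(3, 7);
        e.add(-2, 5);
        e.merge(0, 10, 12, -1);
        // Prints: minX=-2 maxX=10 maxY=12 minY=-1
        System.out.println("minX=" + e.minX + " maxX=" + e.maxX + " maxY=" + e.maxY + " minY=" + e.minY);
    }
}

Because this merge is associative and commutative, partial per-driver states can be combined in any order, which is what the intermediate min/max columns rely on.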
+ + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = SpatialExtentCartesianPointSourceValuesAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeAggregatorFunction.java new file mode 100644 index 0000000000000..19aa4f7ca78a2 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeAggregatorFunction.java @@ -0,0 +1,192 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.aggregation.AggregatorFunction; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link SpatialExtentCartesianShapeAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialExtentCartesianShapeAggregatorFunction implements AggregatorFunction { + private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("minX", ElementType.INT), + new IntermediateStateDesc("maxX", ElementType.INT), + new IntermediateStateDesc("maxY", ElementType.INT), + new IntermediateStateDesc("minY", ElementType.INT) ); + + private final DriverContext driverContext; + + private final SpatialExtentState state; + + private final List<Integer> channels; + + public SpatialExtentCartesianShapeAggregatorFunction(DriverContext driverContext, + List<Integer> channels, SpatialExtentState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static SpatialExtentCartesianShapeAggregatorFunction create(DriverContext driverContext, + List<Integer> channels) { + return new SpatialExtentCartesianShapeAggregatorFunction(driverContext, channels, SpatialExtentCartesianShapeAggregator.initSingle()); + } + + public static List<IntermediateStateDesc> intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page, BooleanVector mask) { + if (mask.allFalse()) { + // Entire page masked away + return; + } + if (mask.allTrue()) { + // No masking + BytesRefBlock block = page.getBlock(channels.get(0)); + BytesRefVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept + BytesRefBlock block = page.getBlock(channels.get(0)); + BytesRefVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector, mask); + } else { + addRawBlock(block, mask); + } + } + + private void addRawVector(BytesRefVector vector) { + BytesRef scratch = new BytesRef(); + for (int i = 0; i < vector.getPositionCount(); i++) { + SpatialExtentCartesianShapeAggregator.combine(state, vector.getBytesRef(i, scratch)); + } + } + + private void addRawVector(BytesRefVector vector, BooleanVector mask) { + BytesRef scratch = new BytesRef(); + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + SpatialExtentCartesianShapeAggregator.combine(state, vector.getBytesRef(i, scratch)); + } + } + + private void addRawBlock(BytesRefBlock block) { + BytesRef scratch = new BytesRef(); + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + SpatialExtentCartesianShapeAggregator.combine(state, block.getBytesRef(i, scratch)); + } + } + } + + private void addRawBlock(BytesRefBlock block, BooleanVector mask) { + BytesRef scratch = new BytesRef(); + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + SpatialExtentCartesianShapeAggregator.combine(state, block.getBytesRef(i, scratch)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + Block minXUncast = page.getBlock(channels.get(0)); + if (minXUncast.areAllValuesNull()) { +
return; + } + IntVector minX = ((IntBlock) minXUncast).asVector(); + assert minX.getPositionCount() == 1; + Block maxXUncast = page.getBlock(channels.get(1)); + if (maxXUncast.areAllValuesNull()) { + return; + } + IntVector maxX = ((IntBlock) maxXUncast).asVector(); + assert maxX.getPositionCount() == 1; + Block maxYUncast = page.getBlock(channels.get(2)); + if (maxYUncast.areAllValuesNull()) { + return; + } + IntVector maxY = ((IntBlock) maxYUncast).asVector(); + assert maxY.getPositionCount() == 1; + Block minYUncast = page.getBlock(channels.get(3)); + if (minYUncast.areAllValuesNull()) { + return; + } + IntVector minY = ((IntBlock) minYUncast).asVector(); + assert minY.getPositionCount() == 1; + SpatialExtentCartesianShapeAggregator.combineIntermediate(state, minX.getInt(0), maxX.getInt(0), maxY.getInt(0), minY.getInt(0)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + state.toIntermediate(blocks, offset, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = SpatialExtentCartesianShapeAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..9e4b292a0ea29 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeAggregatorFunctionSupplier.java @@ -0,0 +1,40 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link SpatialExtentCartesianShapeAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialExtentCartesianShapeAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final List<Integer> channels; + + public SpatialExtentCartesianShapeAggregatorFunctionSupplier(List<Integer> channels) { + this.channels = channels; + } + + @Override + public SpatialExtentCartesianShapeAggregatorFunction aggregator(DriverContext driverContext) { + return SpatialExtentCartesianShapeAggregatorFunction.create(driverContext, channels); + } + + @Override + public SpatialExtentCartesianShapeGroupingAggregatorFunction groupingAggregator( + DriverContext driverContext) { + return SpatialExtentCartesianShapeGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return "spatial_extent_cartesian of shapes"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..c55c3d9c66946 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeGroupingAggregatorFunction.java @@ -0,0 +1,235 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; +import org.elasticsearch.compute.aggregation.SeenGroupIds; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link SpatialExtentCartesianShapeAggregator}. + * This class is generated. Do not edit it.
+ */ +public final class SpatialExtentCartesianShapeGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("minX", ElementType.INT), + new IntermediateStateDesc("maxX", ElementType.INT), + new IntermediateStateDesc("maxY", ElementType.INT), + new IntermediateStateDesc("minY", ElementType.INT) ); + + private final SpatialExtentGroupingState state; + + private final List<Integer> channels; + + private final DriverContext driverContext; + + public SpatialExtentCartesianShapeGroupingAggregatorFunction(List<Integer> channels, + SpatialExtentGroupingState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static SpatialExtentCartesianShapeGroupingAggregatorFunction create(List<Integer> channels, + DriverContext driverContext) { + return new SpatialExtentCartesianShapeGroupingAggregatorFunction(channels, SpatialExtentCartesianShapeAggregator.initGrouping(), driverContext); + } + + public static List<IntermediateStateDesc> intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + BytesRefBlock valuesBlock = page.getBlock(channels.get(0)); + BytesRefVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void close() { + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void close() { + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, BytesRefBlock values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + SpatialExtentCartesianShapeAggregator.combine(state, groupId, values.getBytesRef(v, scratch)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, BytesRefVector values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + SpatialExtentCartesianShapeAggregator.combine(state, groupId, values.getBytesRef(groupPosition + positionOffset, scratch)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, BytesRefBlock values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition <
groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + SpatialExtentCartesianShapeAggregator.combine(state, groupId, values.getBytesRef(v, scratch)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, BytesRefVector values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + SpatialExtentCartesianShapeAggregator.combine(state, groupId, values.getBytesRef(groupPosition + positionOffset, scratch)); + } + } + } + + @Override + public void selectedMayContainUnseenGroups(SeenGroupIds seenGroupIds) { + state.enableGroupIdTracking(seenGroupIds); + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block minXUncast = page.getBlock(channels.get(0)); + if (minXUncast.areAllValuesNull()) { + return; + } + IntVector minX = ((IntBlock) minXUncast).asVector(); + Block maxXUncast = page.getBlock(channels.get(1)); + if (maxXUncast.areAllValuesNull()) { + return; + } + IntVector maxX = ((IntBlock) maxXUncast).asVector(); + Block maxYUncast = page.getBlock(channels.get(2)); + if (maxYUncast.areAllValuesNull()) { + return; + } + IntVector maxY = ((IntBlock) maxYUncast).asVector(); + Block minYUncast = page.getBlock(channels.get(3)); + if (minYUncast.areAllValuesNull()) { + return; + } + IntVector minY = ((IntBlock) minYUncast).asVector(); + assert minX.getPositionCount() == maxX.getPositionCount() && minX.getPositionCount() == maxY.getPositionCount() && minX.getPositionCount() == minY.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + SpatialExtentCartesianShapeAggregator.combineIntermediate(state, groupId, minX.getInt(groupPosition + positionOffset), maxX.getInt(groupPosition + positionOffset), maxY.getInt(groupPosition + positionOffset), minY.getInt(groupPosition + positionOffset)); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + SpatialExtentGroupingState inState = ((SpatialExtentCartesianShapeGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + SpatialExtentCartesianShapeAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, 
selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = SpatialExtentCartesianShapeAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesAggregatorFunction.java new file mode 100644 index 0000000000000..c883e82d45989 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesAggregatorFunction.java @@ -0,0 +1,201 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.aggregation.AggregatorFunction; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link SpatialExtentGeoPointDocValuesAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialExtentGeoPointDocValuesAggregatorFunction implements AggregatorFunction { + private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("minNegX", ElementType.INT), + new IntermediateStateDesc("minPosX", ElementType.INT), + new IntermediateStateDesc("maxNegX", ElementType.INT), + new IntermediateStateDesc("maxPosX", ElementType.INT), + new IntermediateStateDesc("maxY", ElementType.INT), + new IntermediateStateDesc("minY", ElementType.INT) ); + + private final DriverContext driverContext; + + private final SpatialExtentStateWrappedLongitudeState state; + + private final List<Integer> channels; + + public SpatialExtentGeoPointDocValuesAggregatorFunction(DriverContext driverContext, + List<Integer> channels, SpatialExtentStateWrappedLongitudeState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static SpatialExtentGeoPointDocValuesAggregatorFunction create(DriverContext driverContext, + List<Integer> channels) { + return new SpatialExtentGeoPointDocValuesAggregatorFunction(driverContext, channels, SpatialExtentGeoPointDocValuesAggregator.initSingle()); + } + + public static List<IntermediateStateDesc> intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page, BooleanVector mask) { + if (mask.allFalse()) { + // Entire page masked away + return; + } + if (mask.allTrue()) { + // No masking + LongBlock block = page.getBlock(channels.get(0)); + LongVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept + LongBlock block = page.getBlock(channels.get(0)); + LongVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector, mask); + } else { + addRawBlock(block, mask); + } + } + + private void addRawVector(LongVector vector) { + for (int i = 0; i < vector.getPositionCount(); i++) { + SpatialExtentGeoPointDocValuesAggregator.combine(state, vector.getLong(i)); + } + } + + private void addRawVector(LongVector vector, BooleanVector mask) { + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + SpatialExtentGeoPointDocValuesAggregator.combine(state, vector.getLong(i)); + } + } + + private void addRawBlock(LongBlock block) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + SpatialExtentGeoPointDocValuesAggregator.combine(state, block.getLong(i)); + } + } + } + + private void addRawBlock(LongBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + SpatialExtentGeoPointDocValuesAggregator.combine(state, block.getLong(i)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + Block minNegXUncast = page.getBlock(channels.get(0)); + if (minNegXUncast.areAllValuesNull()) { + return; + } + IntVector minNegX =
((IntBlock) minNegXUncast).asVector(); + assert minNegX.getPositionCount() == 1; + Block minPosXUncast = page.getBlock(channels.get(1)); + if (minPosXUncast.areAllValuesNull()) { + return; + } + IntVector minPosX = ((IntBlock) minPosXUncast).asVector(); + assert minPosX.getPositionCount() == 1; + Block maxNegXUncast = page.getBlock(channels.get(2)); + if (maxNegXUncast.areAllValuesNull()) { + return; + } + IntVector maxNegX = ((IntBlock) maxNegXUncast).asVector(); + assert maxNegX.getPositionCount() == 1; + Block maxPosXUncast = page.getBlock(channels.get(3)); + if (maxPosXUncast.areAllValuesNull()) { + return; + } + IntVector maxPosX = ((IntBlock) maxPosXUncast).asVector(); + assert maxPosX.getPositionCount() == 1; + Block maxYUncast = page.getBlock(channels.get(4)); + if (maxYUncast.areAllValuesNull()) { + return; + } + IntVector maxY = ((IntBlock) maxYUncast).asVector(); + assert maxY.getPositionCount() == 1; + Block minYUncast = page.getBlock(channels.get(5)); + if (minYUncast.areAllValuesNull()) { + return; + } + IntVector minY = ((IntBlock) minYUncast).asVector(); + assert minY.getPositionCount() == 1; + SpatialExtentGeoPointDocValuesAggregator.combineIntermediate(state, minNegX.getInt(0), minPosX.getInt(0), maxNegX.getInt(0), maxPosX.getInt(0), maxY.getInt(0), minY.getInt(0)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + state.toIntermediate(blocks, offset, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = SpatialExtentGeoPointDocValuesAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..f72a4cc648ec8 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesAggregatorFunctionSupplier.java @@ -0,0 +1,40 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link SpatialExtentGeoPointDocValuesAggregator}. + * This class is generated. Do not edit it. 
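Two input flavors alternate through the rest of this diff: the DocValues variants read each geo point as a single long (block.getLong(i) above), while the SourceValues variants decode geometries from WKB bytes held in a BytesRef. The exact long encoding lives in the aggregator support classes and is not shown in this diff; the sketch below only illustrates, under that assumption, why one long can carry a whole point: two 32-bit encoded coordinates packed into its halves.

// Hypothetical packing of two 32-bit encoded coordinates into one long.
// The real encoding is defined elsewhere in the ESQL spatial code; this
// simply shows that a single long suffices for the doc-values path.
final class PointPackingSketch {
    static long pack(int x, int y) { return ((long) x << 32) | (y & 0xFFFFFFFFL); }
    static int unpackX(long packed) { return (int) (packed >>> 32); }
    static int unpackY(long packed) { return (int) packed; }

    public static void main(String[] args) {
        long packed = pack(-5, 42);
        System.out.println(unpackX(packed) + "," + unpackY(packed)); // prints -5,42
    }
}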
+ */ +public final class SpatialExtentGeoPointDocValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final List<Integer> channels; + + public SpatialExtentGeoPointDocValuesAggregatorFunctionSupplier(List<Integer> channels) { + this.channels = channels; + } + + @Override + public SpatialExtentGeoPointDocValuesAggregatorFunction aggregator(DriverContext driverContext) { + return SpatialExtentGeoPointDocValuesAggregatorFunction.create(driverContext, channels); + } + + @Override + public SpatialExtentGeoPointDocValuesGroupingAggregatorFunction groupingAggregator( + DriverContext driverContext) { + return SpatialExtentGeoPointDocValuesGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return "spatial_extent_geo_point_doc of valuess"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..eee5bc5df41a4 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesGroupingAggregatorFunction.java @@ -0,0 +1,242 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; +import org.elasticsearch.compute.aggregation.SeenGroupIds; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link SpatialExtentGeoPointDocValuesAggregator}. + * This class is generated. Do not edit it.
+ */ +public final class SpatialExtentGeoPointDocValuesGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("minNegX", ElementType.INT), + new IntermediateStateDesc("minPosX", ElementType.INT), + new IntermediateStateDesc("maxNegX", ElementType.INT), + new IntermediateStateDesc("maxPosX", ElementType.INT), + new IntermediateStateDesc("maxY", ElementType.INT), + new IntermediateStateDesc("minY", ElementType.INT) ); + + private final SpatialExtentGroupingStateWrappedLongitudeState state; + + private final List<Integer> channels; + + private final DriverContext driverContext; + + public SpatialExtentGeoPointDocValuesGroupingAggregatorFunction(List<Integer> channels, + SpatialExtentGroupingStateWrappedLongitudeState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static SpatialExtentGeoPointDocValuesGroupingAggregatorFunction create( + List<Integer> channels, DriverContext driverContext) { + return new SpatialExtentGeoPointDocValuesGroupingAggregatorFunction(channels, SpatialExtentGeoPointDocValuesAggregator.initGrouping(), driverContext); + } + + public static List<IntermediateStateDesc> intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + LongBlock valuesBlock = page.getBlock(channels.get(0)); + LongVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void close() { + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void close() { + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, LongBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + SpatialExtentGeoPointDocValuesAggregator.combine(state, groupId, values.getLong(v)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, LongVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + SpatialExtentGeoPointDocValuesAggregator.combine(state, groupId, values.getLong(groupPosition + positionOffset)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, LongBlock values) { + for (int
groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + SpatialExtentGeoPointDocValuesAggregator.combine(state, groupId, values.getLong(v)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, LongVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + SpatialExtentGeoPointDocValuesAggregator.combine(state, groupId, values.getLong(groupPosition + positionOffset)); + } + } + } + + @Override + public void selectedMayContainUnseenGroups(SeenGroupIds seenGroupIds) { + state.enableGroupIdTracking(seenGroupIds); + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block minNegXUncast = page.getBlock(channels.get(0)); + if (minNegXUncast.areAllValuesNull()) { + return; + } + IntVector minNegX = ((IntBlock) minNegXUncast).asVector(); + Block minPosXUncast = page.getBlock(channels.get(1)); + if (minPosXUncast.areAllValuesNull()) { + return; + } + IntVector minPosX = ((IntBlock) minPosXUncast).asVector(); + Block maxNegXUncast = page.getBlock(channels.get(2)); + if (maxNegXUncast.areAllValuesNull()) { + return; + } + IntVector maxNegX = ((IntBlock) maxNegXUncast).asVector(); + Block maxPosXUncast = page.getBlock(channels.get(3)); + if (maxPosXUncast.areAllValuesNull()) { + return; + } + IntVector maxPosX = ((IntBlock) maxPosXUncast).asVector(); + Block maxYUncast = page.getBlock(channels.get(4)); + if (maxYUncast.areAllValuesNull()) { + return; + } + IntVector maxY = ((IntBlock) maxYUncast).asVector(); + Block minYUncast = page.getBlock(channels.get(5)); + if (minYUncast.areAllValuesNull()) { + return; + } + IntVector minY = ((IntBlock) minYUncast).asVector(); + assert minNegX.getPositionCount() == minPosX.getPositionCount() && minNegX.getPositionCount() == maxNegX.getPositionCount() && minNegX.getPositionCount() == maxPosX.getPositionCount() && minNegX.getPositionCount() == maxY.getPositionCount() && minNegX.getPositionCount() == minY.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + SpatialExtentGeoPointDocValuesAggregator.combineIntermediate(state, groupId, minNegX.getInt(groupPosition + positionOffset), minPosX.getInt(groupPosition + positionOffset), maxNegX.getInt(groupPosition + positionOffset), maxPosX.getInt(groupPosition + positionOffset), maxY.getInt(groupPosition + positionOffset), minY.getInt(groupPosition + positionOffset)); + } + } + + @Override + public void addIntermediateRowInput(int groupId, 
GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + SpatialExtentGroupingStateWrappedLongitudeState inState = ((SpatialExtentGeoPointDocValuesGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + SpatialExtentGeoPointDocValuesAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = SpatialExtentGeoPointDocValuesAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesAggregatorFunction.java new file mode 100644 index 0000000000000..cf65fbdde594c --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesAggregatorFunction.java @@ -0,0 +1,206 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.aggregation.AggregatorFunction; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link SpatialExtentGeoPointSourceValuesAggregator}. + * This class is generated. Do not edit it. 
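Unlike the cartesian classes, the geo variants above carry six intermediate values: longitude bounds are tracked separately for the negative and positive hemispheres (minNegX/maxNegX and minPosX/maxPosX), and their state types are the WrappedLongitude flavors. The apparent intent, stated here only as an inference from those names, is to keep enough information for the final reduction to prefer an extent that crosses the antimeridian when that is tighter than the conventional one. A toy illustration of that choice in plain degrees, not the encoded ints the generated code exchanges:

// Toy dateline decision: given separately tracked negative/positive longitude
// ranges, pick whichever candidate extent is narrower. Degrees only; purely
// illustrative of why negative and positive bounds are kept apart.
final class WrappedLongitudeSketch {
    static String chooseExtent(double minNeg, double maxNeg, double minPos, double maxPos) {
        double regularWidth = maxPos - minNeg;                    // extent through Greenwich
        double wrappedWidth = (180 - minPos) + (maxNeg + 180);    // extent through the antimeridian
        return wrappedWidth < regularWidth
            ? "wrapped: [" + minPos + "..180] U [-180.." + maxNeg + "]"
            : "regular: [" + minNeg + ".." + maxPos + "]";
    }

    public static void main(String[] args) {
        // Points at 170 degrees east and 175 degrees west: wrapping is tighter.
        System.out.println(chooseExtent(-175, -175, 170, 170)); // wrapped: [170.0..180] U [-180..-175.0]
    }
}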
+ */ +public final class SpatialExtentGeoPointSourceValuesAggregatorFunction implements AggregatorFunction { + private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("minNegX", ElementType.INT), + new IntermediateStateDesc("minPosX", ElementType.INT), + new IntermediateStateDesc("maxNegX", ElementType.INT), + new IntermediateStateDesc("maxPosX", ElementType.INT), + new IntermediateStateDesc("maxY", ElementType.INT), + new IntermediateStateDesc("minY", ElementType.INT) ); + + private final DriverContext driverContext; + + private final SpatialExtentStateWrappedLongitudeState state; + + private final List<Integer> channels; + + public SpatialExtentGeoPointSourceValuesAggregatorFunction(DriverContext driverContext, + List<Integer> channels, SpatialExtentStateWrappedLongitudeState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static SpatialExtentGeoPointSourceValuesAggregatorFunction create( + DriverContext driverContext, List<Integer> channels) { + return new SpatialExtentGeoPointSourceValuesAggregatorFunction(driverContext, channels, SpatialExtentGeoPointSourceValuesAggregator.initSingle()); + } + + public static List<IntermediateStateDesc> intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page, BooleanVector mask) { + if (mask.allFalse()) { + // Entire page masked away + return; + } + if (mask.allTrue()) { + // No masking + BytesRefBlock block = page.getBlock(channels.get(0)); + BytesRefVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept + BytesRefBlock block = page.getBlock(channels.get(0)); + BytesRefVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector, mask); + } else { + addRawBlock(block, mask); + } + } + + private void addRawVector(BytesRefVector vector) { + BytesRef scratch = new BytesRef(); + for (int i = 0; i < vector.getPositionCount(); i++) { + SpatialExtentGeoPointSourceValuesAggregator.combine(state, vector.getBytesRef(i, scratch)); + } + } + + private void addRawVector(BytesRefVector vector, BooleanVector mask) { + BytesRef scratch = new BytesRef(); + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + SpatialExtentGeoPointSourceValuesAggregator.combine(state, vector.getBytesRef(i, scratch)); + } + } + + private void addRawBlock(BytesRefBlock block) { + BytesRef scratch = new BytesRef(); + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + SpatialExtentGeoPointSourceValuesAggregator.combine(state, block.getBytesRef(i, scratch)); + } + } + } + + private void addRawBlock(BytesRefBlock block, BooleanVector mask) { + BytesRef scratch = new BytesRef(); + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + SpatialExtentGeoPointSourceValuesAggregator.combine(state, block.getBytesRef(i, scratch)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert
channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + Block minNegXUncast = page.getBlock(channels.get(0)); + if (minNegXUncast.areAllValuesNull()) { + return; + } + IntVector minNegX = ((IntBlock) minNegXUncast).asVector(); + assert minNegX.getPositionCount() == 1; + Block minPosXUncast = page.getBlock(channels.get(1)); + if (minPosXUncast.areAllValuesNull()) { + return; + } + IntVector minPosX = ((IntBlock) minPosXUncast).asVector(); + assert minPosX.getPositionCount() == 1; + Block maxNegXUncast = page.getBlock(channels.get(2)); + if (maxNegXUncast.areAllValuesNull()) { + return; + } + IntVector maxNegX = ((IntBlock) maxNegXUncast).asVector(); + assert maxNegX.getPositionCount() == 1; + Block maxPosXUncast = page.getBlock(channels.get(3)); + if (maxPosXUncast.areAllValuesNull()) { + return; + } + IntVector maxPosX = ((IntBlock) maxPosXUncast).asVector(); + assert maxPosX.getPositionCount() == 1; + Block maxYUncast = page.getBlock(channels.get(4)); + if (maxYUncast.areAllValuesNull()) { + return; + } + IntVector maxY = ((IntBlock) maxYUncast).asVector(); + assert maxY.getPositionCount() == 1; + Block minYUncast = page.getBlock(channels.get(5)); + if (minYUncast.areAllValuesNull()) { + return; + } + IntVector minY = ((IntBlock) minYUncast).asVector(); + assert minY.getPositionCount() == 1; + SpatialExtentGeoPointSourceValuesAggregator.combineIntermediate(state, minNegX.getInt(0), minPosX.getInt(0), maxNegX.getInt(0), maxPosX.getInt(0), maxY.getInt(0), minY.getInt(0)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + state.toIntermediate(blocks, offset, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = SpatialExtentGeoPointSourceValuesAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..1af20d72d08b0 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesAggregatorFunctionSupplier.java @@ -0,0 +1,41 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link SpatialExtentGeoPointSourceValuesAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialExtentGeoPointSourceValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final List<Integer> channels; + + public SpatialExtentGeoPointSourceValuesAggregatorFunctionSupplier(List<Integer> channels) { + this.channels = channels; + } + + @Override + public SpatialExtentGeoPointSourceValuesAggregatorFunction aggregator( + DriverContext driverContext) { + return SpatialExtentGeoPointSourceValuesAggregatorFunction.create(driverContext, channels); + } + + @Override + public SpatialExtentGeoPointSourceValuesGroupingAggregatorFunction groupingAggregator( + DriverContext driverContext) { + return SpatialExtentGeoPointSourceValuesGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return "spatial_extent_geo_point_source of valuess"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..bf8ab2554c7b7 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesGroupingAggregatorFunction.java @@ -0,0 +1,247 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; +import org.elasticsearch.compute.aggregation.SeenGroupIds; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link SpatialExtentGeoPointSourceValuesAggregator}. + * This class is generated. Do not edit it.
+ */ +public final class SpatialExtentGeoPointSourceValuesGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("minNegX", ElementType.INT), + new IntermediateStateDesc("minPosX", ElementType.INT), + new IntermediateStateDesc("maxNegX", ElementType.INT), + new IntermediateStateDesc("maxPosX", ElementType.INT), + new IntermediateStateDesc("maxY", ElementType.INT), + new IntermediateStateDesc("minY", ElementType.INT) ); + + private final SpatialExtentGroupingStateWrappedLongitudeState state; + + private final List<Integer> channels; + + private final DriverContext driverContext; + + public SpatialExtentGeoPointSourceValuesGroupingAggregatorFunction(List<Integer> channels, + SpatialExtentGroupingStateWrappedLongitudeState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static SpatialExtentGeoPointSourceValuesGroupingAggregatorFunction create( + List<Integer> channels, DriverContext driverContext) { + return new SpatialExtentGeoPointSourceValuesGroupingAggregatorFunction(channels, SpatialExtentGeoPointSourceValuesAggregator.initGrouping(), driverContext); + } + + public static List<IntermediateStateDesc> intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + BytesRefBlock valuesBlock = page.getBlock(channels.get(0)); + BytesRefVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void close() { + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void close() { + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, BytesRefBlock values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + SpatialExtentGeoPointSourceValuesAggregator.combine(state, groupId, values.getBytesRef(v, scratch)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, BytesRefVector values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + SpatialExtentGeoPointSourceValuesAggregator.combine(state, groupId, values.getBytesRef(groupPosition +
positionOffset, scratch)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, BytesRefBlock values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + SpatialExtentGeoPointSourceValuesAggregator.combine(state, groupId, values.getBytesRef(v, scratch)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, BytesRefVector values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + SpatialExtentGeoPointSourceValuesAggregator.combine(state, groupId, values.getBytesRef(groupPosition + positionOffset, scratch)); + } + } + } + + @Override + public void selectedMayContainUnseenGroups(SeenGroupIds seenGroupIds) { + state.enableGroupIdTracking(seenGroupIds); + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block minNegXUncast = page.getBlock(channels.get(0)); + if (minNegXUncast.areAllValuesNull()) { + return; + } + IntVector minNegX = ((IntBlock) minNegXUncast).asVector(); + Block minPosXUncast = page.getBlock(channels.get(1)); + if (minPosXUncast.areAllValuesNull()) { + return; + } + IntVector minPosX = ((IntBlock) minPosXUncast).asVector(); + Block maxNegXUncast = page.getBlock(channels.get(2)); + if (maxNegXUncast.areAllValuesNull()) { + return; + } + IntVector maxNegX = ((IntBlock) maxNegXUncast).asVector(); + Block maxPosXUncast = page.getBlock(channels.get(3)); + if (maxPosXUncast.areAllValuesNull()) { + return; + } + IntVector maxPosX = ((IntBlock) maxPosXUncast).asVector(); + Block maxYUncast = page.getBlock(channels.get(4)); + if (maxYUncast.areAllValuesNull()) { + return; + } + IntVector maxY = ((IntBlock) maxYUncast).asVector(); + Block minYUncast = page.getBlock(channels.get(5)); + if (minYUncast.areAllValuesNull()) { + return; + } + IntVector minY = ((IntBlock) minYUncast).asVector(); + assert minNegX.getPositionCount() == minPosX.getPositionCount() && minNegX.getPositionCount() == maxNegX.getPositionCount() && minNegX.getPositionCount() == maxPosX.getPositionCount() && minNegX.getPositionCount() == maxY.getPositionCount() && minNegX.getPositionCount() == minY.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + SpatialExtentGeoPointSourceValuesAggregator.combineIntermediate(state, groupId, minNegX.getInt(groupPosition + positionOffset), minPosX.getInt(groupPosition + positionOffset), 
maxNegX.getInt(groupPosition + positionOffset), maxPosX.getInt(groupPosition + positionOffset), maxY.getInt(groupPosition + positionOffset), minY.getInt(groupPosition + positionOffset)); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + SpatialExtentGroupingStateWrappedLongitudeState inState = ((SpatialExtentGeoPointSourceValuesGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + SpatialExtentGeoPointSourceValuesAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = SpatialExtentGeoPointSourceValuesAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeAggregatorFunction.java new file mode 100644 index 0000000000000..abee9a1cee284 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeAggregatorFunction.java @@ -0,0 +1,206 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.aggregation.AggregatorFunction; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link SpatialExtentGeoShapeAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialExtentGeoShapeAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("minNegX", ElementType.INT), + new IntermediateStateDesc("minPosX", ElementType.INT), + new IntermediateStateDesc("maxNegX", ElementType.INT), + new IntermediateStateDesc("maxPosX", ElementType.INT), + new IntermediateStateDesc("maxY", ElementType.INT), + new IntermediateStateDesc("minY", ElementType.INT) ); + + private final DriverContext driverContext; + + private final SpatialExtentStateWrappedLongitudeState state; + + private final List channels; + + public SpatialExtentGeoShapeAggregatorFunction(DriverContext driverContext, + List channels, SpatialExtentStateWrappedLongitudeState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static SpatialExtentGeoShapeAggregatorFunction create(DriverContext driverContext, + List channels) { + return new SpatialExtentGeoShapeAggregatorFunction(driverContext, channels, SpatialExtentGeoShapeAggregator.initSingle()); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page, BooleanVector mask) { + if (mask.allFalse()) { + // Entire page masked away + return; + } + if (mask.allTrue()) { + // No masking + BytesRefBlock block = page.getBlock(channels.get(0)); + BytesRefVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept + BytesRefBlock block = page.getBlock(channels.get(0)); + BytesRefVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector, mask); + } else { + addRawBlock(block, mask); + } + } + + private void addRawVector(BytesRefVector vector) { + BytesRef scratch = new BytesRef(); + for (int i = 0; i < vector.getPositionCount(); i++) { + SpatialExtentGeoShapeAggregator.combine(state, vector.getBytesRef(i, scratch)); + } + } + + private void addRawVector(BytesRefVector vector, BooleanVector mask) { + BytesRef scratch = new BytesRef(); + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + SpatialExtentGeoShapeAggregator.combine(state, vector.getBytesRef(i, scratch)); + } + } + + private void addRawBlock(BytesRefBlock block) { + BytesRef scratch = new BytesRef(); + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + SpatialExtentGeoShapeAggregator.combine(state, block.getBytesRef(i, scratch)); + } + } + } + + private void addRawBlock(BytesRefBlock block, BooleanVector mask) { + BytesRef scratch = new BytesRef(); + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + SpatialExtentGeoShapeAggregator.combine(state, block.getBytesRef(i, scratch)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + 
intermediateStateDesc().size(); + Block minNegXUncast = page.getBlock(channels.get(0)); + if (minNegXUncast.areAllValuesNull()) { + return; + } + IntVector minNegX = ((IntBlock) minNegXUncast).asVector(); + assert minNegX.getPositionCount() == 1; + Block minPosXUncast = page.getBlock(channels.get(1)); + if (minPosXUncast.areAllValuesNull()) { + return; + } + IntVector minPosX = ((IntBlock) minPosXUncast).asVector(); + assert minPosX.getPositionCount() == 1; + Block maxNegXUncast = page.getBlock(channels.get(2)); + if (maxNegXUncast.areAllValuesNull()) { + return; + } + IntVector maxNegX = ((IntBlock) maxNegXUncast).asVector(); + assert maxNegX.getPositionCount() == 1; + Block maxPosXUncast = page.getBlock(channels.get(3)); + if (maxPosXUncast.areAllValuesNull()) { + return; + } + IntVector maxPosX = ((IntBlock) maxPosXUncast).asVector(); + assert maxPosX.getPositionCount() == 1; + Block maxYUncast = page.getBlock(channels.get(4)); + if (maxYUncast.areAllValuesNull()) { + return; + } + IntVector maxY = ((IntBlock) maxYUncast).asVector(); + assert maxY.getPositionCount() == 1; + Block minYUncast = page.getBlock(channels.get(5)); + if (minYUncast.areAllValuesNull()) { + return; + } + IntVector minY = ((IntBlock) minYUncast).asVector(); + assert minY.getPositionCount() == 1; + SpatialExtentGeoShapeAggregator.combineIntermediate(state, minNegX.getInt(0), minPosX.getInt(0), maxNegX.getInt(0), maxPosX.getInt(0), maxY.getInt(0), minY.getInt(0)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + state.toIntermediate(blocks, offset, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = SpatialExtentGeoShapeAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..09f210c7085f8 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeAggregatorFunctionSupplier.java @@ -0,0 +1,40 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link SpatialExtentGeoShapeAggregator}. + * This class is generated. Do not edit it. 
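In the non-grouping aggregator above, the intermediate handoff is one single-position INT block per IntermediateStateDesc entry, in declaration order, which is why every field is asserted to have getPositionCount() == 1 before combineIntermediate reads position 0. A toy model of that page shape, with illustrative names only:

    // Minimal model of the intermediate page: six single-position INT "blocks",
    // one per state field, addressed in declaration order.
    final class IntermediatePage {
        final int[][] blocks; // blocks[field][position]; every field has one position

        IntermediatePage(int minNegX, int minPosX, int maxNegX, int maxPosX, int maxY, int minY) {
            blocks = new int[][] { { minNegX }, { minPosX }, { maxNegX }, { maxPosX }, { maxY }, { minY } };
        }

        int get(int field) {
            assert blocks[field].length == 1; // mirrors the positionCount == 1 asserts above
            return blocks[field][0];
        }
    }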
+ */ +public final class SpatialExtentGeoShapeAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final List channels; + + public SpatialExtentGeoShapeAggregatorFunctionSupplier(List channels) { + this.channels = channels; + } + + @Override + public SpatialExtentGeoShapeAggregatorFunction aggregator(DriverContext driverContext) { + return SpatialExtentGeoShapeAggregatorFunction.create(driverContext, channels); + } + + @Override + public SpatialExtentGeoShapeGroupingAggregatorFunction groupingAggregator( + DriverContext driverContext) { + return SpatialExtentGeoShapeGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return "spatial_extent_geo of shapes"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..1200259ea6c41 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeGroupingAggregatorFunction.java @@ -0,0 +1,247 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; +import org.elasticsearch.compute.aggregation.SeenGroupIds; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link SpatialExtentGeoShapeAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialExtentGeoShapeGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("minNegX", ElementType.INT), + new IntermediateStateDesc("minPosX", ElementType.INT), + new IntermediateStateDesc("maxNegX", ElementType.INT), + new IntermediateStateDesc("maxPosX", ElementType.INT), + new IntermediateStateDesc("maxY", ElementType.INT), + new IntermediateStateDesc("minY", ElementType.INT) ); + + private final SpatialExtentGroupingStateWrappedLongitudeState state; + + private final List channels; + + private final DriverContext driverContext; + + public SpatialExtentGeoShapeGroupingAggregatorFunction(List channels, + SpatialExtentGroupingStateWrappedLongitudeState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static SpatialExtentGeoShapeGroupingAggregatorFunction create(List channels, + DriverContext driverContext) { + return new SpatialExtentGeoShapeGroupingAggregatorFunction(channels, SpatialExtentGeoShapeAggregator.initGrouping(), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + BytesRefBlock valuesBlock = page.getBlock(channels.get(0)); + BytesRefVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void close() { + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void close() { + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, BytesRefBlock values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + SpatialExtentGeoShapeAggregator.combine(state, groupId, values.getBytesRef(v, scratch)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, BytesRefVector values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + SpatialExtentGeoShapeAggregator.combine(state, groupId, values.getBytesRef(groupPosition + positionOffset, scratch)); + } + } + + private void addRawInput(int positionOffset, 
IntBlock groups, BytesRefBlock values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + SpatialExtentGeoShapeAggregator.combine(state, groupId, values.getBytesRef(v, scratch)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, BytesRefVector values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + SpatialExtentGeoShapeAggregator.combine(state, groupId, values.getBytesRef(groupPosition + positionOffset, scratch)); + } + } + } + + @Override + public void selectedMayContainUnseenGroups(SeenGroupIds seenGroupIds) { + state.enableGroupIdTracking(seenGroupIds); + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block minNegXUncast = page.getBlock(channels.get(0)); + if (minNegXUncast.areAllValuesNull()) { + return; + } + IntVector minNegX = ((IntBlock) minNegXUncast).asVector(); + Block minPosXUncast = page.getBlock(channels.get(1)); + if (minPosXUncast.areAllValuesNull()) { + return; + } + IntVector minPosX = ((IntBlock) minPosXUncast).asVector(); + Block maxNegXUncast = page.getBlock(channels.get(2)); + if (maxNegXUncast.areAllValuesNull()) { + return; + } + IntVector maxNegX = ((IntBlock) maxNegXUncast).asVector(); + Block maxPosXUncast = page.getBlock(channels.get(3)); + if (maxPosXUncast.areAllValuesNull()) { + return; + } + IntVector maxPosX = ((IntBlock) maxPosXUncast).asVector(); + Block maxYUncast = page.getBlock(channels.get(4)); + if (maxYUncast.areAllValuesNull()) { + return; + } + IntVector maxY = ((IntBlock) maxYUncast).asVector(); + Block minYUncast = page.getBlock(channels.get(5)); + if (minYUncast.areAllValuesNull()) { + return; + } + IntVector minY = ((IntBlock) minYUncast).asVector(); + assert minNegX.getPositionCount() == minPosX.getPositionCount() && minNegX.getPositionCount() == maxNegX.getPositionCount() && minNegX.getPositionCount() == maxPosX.getPositionCount() && minNegX.getPositionCount() == maxY.getPositionCount() && minNegX.getPositionCount() == minY.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + SpatialExtentGeoShapeAggregator.combineIntermediate(state, groupId, minNegX.getInt(groupPosition + positionOffset), minPosX.getInt(groupPosition + positionOffset), maxNegX.getInt(groupPosition + positionOffset), maxPosX.getInt(groupPosition + positionOffset), maxY.getInt(groupPosition + positionOffset), 
minY.getInt(groupPosition + positionOffset)); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + SpatialExtentGroupingStateWrappedLongitudeState inState = ((SpatialExtentGeoShapeGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + SpatialExtentGeoShapeAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = SpatialExtentGeoShapeAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/AbstractArrayState.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/AbstractArrayState.java index 45a45f4337beb..5fa1394e8cf96 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/AbstractArrayState.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/AbstractArrayState.java @@ -28,7 +28,7 @@ public AbstractArrayState(BigArrays bigArrays) { this.bigArrays = bigArrays; } - boolean hasValue(int groupId) { + public boolean hasValue(int groupId) { return seen == null || seen.get(groupId); } @@ -37,7 +37,7 @@ boolean hasValue(int groupId) { * idempotent and fast if already tracking so it's safe to, say, call it once * for every block of values that arrives containing {@code null}. 
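The AbstractArrayState hunk above widens hasValue and enableGroupIdTracking to public so the new spatial extent states, which live in another package, can reuse them. The tracking itself is a lazily allocated seen bitset; a standalone model of the semantics, substituting java.util.BitSet for the BigArrays-backed BitArray:

    import java.util.BitSet;

    // While `seen` is null (the common all-values-present case) every group
    // counts as having a value; tracking only switches on once nulls can occur.
    final class SeenTracking {
        private BitSet seen; // null means "not tracking"

        boolean hasValue(int groupId) {
            return seen == null || seen.get(groupId);
        }

        // Idempotent, as the javadoc above stresses: safe to call for every
        // null-bearing block that arrives.
        void enableGroupIdTracking(BitSet seenGroupIds) {
            if (seen == null) {
                seen = seenGroupIds;
            }
        }

        void markSeen(int groupId) {
            if (seen != null) {
                seen.set(groupId);
            }
        }
    }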
*/ - final void enableGroupIdTracking(SeenGroupIds seenGroupIds) { + public final void enableGroupIdTracking(SeenGroupIds seenGroupIds) { if (seen == null) { seen = seenGroupIds.seenGroupIds(bigArrays); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java index 9b53e6558f4db..191d6443264ca 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java @@ -180,13 +180,16 @@ public static BlockHash buildCategorizeBlockHash( List groups, AggregatorMode aggregatorMode, BlockFactory blockFactory, - AnalysisRegistry analysisRegistry + AnalysisRegistry analysisRegistry, + int emitBatchSize ) { - if (groups.size() != 1) { - throw new IllegalArgumentException("only a single CATEGORIZE group can used"); + if (groups.size() == 1) { + return new CategorizeBlockHash(blockFactory, groups.get(0).channel, aggregatorMode, analysisRegistry); + } else { + assert groups.get(0).isCategorize(); + assert groups.subList(1, groups.size()).stream().noneMatch(GroupSpec::isCategorize); + return new CategorizePackedValuesBlockHash(groups, blockFactory, aggregatorMode, analysisRegistry, emitBatchSize); } - - return new CategorizeBlockHash(blockFactory, groups.get(0).channel, aggregatorMode, analysisRegistry); } /** diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHash.java index 35c6faf84e623..f83776fbdbc85 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHash.java @@ -44,7 +44,7 @@ import java.util.Objects; /** - * Base BlockHash implementation for {@code Categorize} grouping function. + * BlockHash implementation for {@code Categorize} grouping function. */ public class CategorizeBlockHash extends BlockHash { @@ -53,11 +53,9 @@ public class CategorizeBlockHash extends BlockHash { ); private static final int NULL_ORD = 0; - // TODO: this should probably also take an emitBatchSize private final int channel; private final AggregatorMode aggregatorMode; private final TokenListCategorizer.CloseableTokenListCategorizer categorizer; - private final CategorizeEvaluator evaluator; /** @@ -95,12 +93,14 @@ public class CategorizeBlockHash extends BlockHash { } } + boolean seenNull() { + return seenNull; + } + @Override public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { - if (aggregatorMode.isInputPartial() == false) { - addInitial(page, addInput); - } else { - addIntermediate(page, addInput); + try (IntBlock block = add(page)) { + addInput.add(0, block); } } @@ -129,50 +129,38 @@ public void close() { Releasables.close(evaluator, categorizer); } + private IntBlock add(Page page) { + return aggregatorMode.isInputPartial() == false ? addInitial(page) : addIntermediate(page); + } + /** * Adds initial (raw) input to the state. 
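The CategorizeBlockHash refactor above collapses the two add paths into one: whether the input is raw text or serialized intermediate state, the hash now produces a single IntBlock of category ordinals and hands it to addInput. A compact sketch of that control flow; OrdBlock and the Object page are hypothetical stand-ins for the ES block types.

    import java.util.function.Consumer;

    interface OrdBlock extends AutoCloseable {
        @Override
        void close(); // no checked exception, like ES Releasable
    }

    abstract class CategorizeHashSketch {
        abstract boolean inputIsPartial();              // aggregatorMode.isInputPartial()
        abstract OrdBlock addInitial(Object page);      // categorize raw text values
        abstract OrdBlock addIntermediate(Object page); // remap ids from serialized state

        final void add(Object page, Consumer<OrdBlock> addInput) {
            try (OrdBlock ords = inputIsPartial() ? addIntermediate(page) : addInitial(page)) {
                addInput.accept(ords);
            }
        }
    }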
*/ - private void addInitial(Page page, GroupingAggregatorFunction.AddInput addInput) { - try (IntBlock result = (IntBlock) evaluator.eval(page.getBlock(channel))) { - addInput.add(0, result); - } + IntBlock addInitial(Page page) { + return (IntBlock) evaluator.eval(page.getBlock(channel)); } /** * Adds intermediate state to the state. */ - private void addIntermediate(Page page, GroupingAggregatorFunction.AddInput addInput) { + private IntBlock addIntermediate(Page page) { if (page.getPositionCount() == 0) { - return; + return null; } BytesRefBlock categorizerState = page.getBlock(channel); if (categorizerState.areAllValuesNull()) { seenNull = true; - try (var newIds = blockFactory.newConstantIntVector(NULL_ORD, 1)) { - addInput.add(0, newIds); - } - return; - } - - Map idMap = readIntermediate(categorizerState.getBytesRef(0, new BytesRef())); - try (IntBlock.Builder newIdsBuilder = blockFactory.newIntBlockBuilder(idMap.size())) { - int fromId = idMap.containsKey(0) ? 0 : 1; - int toId = fromId + idMap.size(); - for (int i = fromId; i < toId; i++) { - newIdsBuilder.appendInt(idMap.get(i)); - } - try (IntBlock newIds = newIdsBuilder.build()) { - addInput.add(0, newIds); - } + return blockFactory.newConstantIntBlockWith(NULL_ORD, 1); } + return recategorize(categorizerState.getBytesRef(0, new BytesRef()), null).asBlock(); } /** - * Read intermediate state from a block. - * - * @return a map from the old category id to the new one. The old ids go from 0 to {@code size - 1}. + * Reads the intermediate state from a block and recategorizes the provided IDs. + * If no IDs are provided, the IDs are the IDs in the categorizer's state in order. + * (So 0...N-1 or 1...N, depending on whether null is present.) */ - private Map readIntermediate(BytesRef bytes) { + IntVector recategorize(BytesRef bytes, IntVector ids) { Map idMap = new HashMap<>(); try (StreamInput in = new BytesArray(bytes).streamInput()) { if (in.readBoolean()) { @@ -185,10 +173,22 @@ private Map readIntermediate(BytesRef bytes) { // +1 because the 0 ordinal is reserved for null idMap.put(oldCategoryId + 1, newCategoryId + 1); } - return idMap; } catch (IOException e) { throw new RuntimeException(e); } + try (IntVector.Builder newIdsBuilder = blockFactory.newIntVectorBuilder(idMap.size())) { + if (ids == null) { + int idOffset = idMap.containsKey(0) ? 0 : 1; + for (int i = 0; i < idMap.size(); i++) { + newIdsBuilder.appendInt(idMap.get(i + idOffset)); + } + } else { + for (int i = 0; i < ids.getPositionCount(); i++) { + newIdsBuilder.appendInt(idMap.get(ids.getInt(i))); + } + } + return newIdsBuilder.build(); + } } /** @@ -198,15 +198,20 @@ private Block buildIntermediateBlock() { if (categorizer.getCategoryCount() == 0) { return blockFactory.newConstantNullBlock(seenNull ? 1 : 0); } + int positionCount = categorizer.getCategoryCount() + (seenNull ? 1 : 0); + // We're returning a block with N positions just because the Page must have all blocks with the same position count! + return blockFactory.newConstantBytesRefBlockWith(serializeCategorizer(), positionCount); + } + + BytesRef serializeCategorizer() { + // TODO: This BytesStreamOutput is not accounted for by the circuit breaker. Fix that! 
try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeBoolean(seenNull); out.writeVInt(categorizer.getCategoryCount()); for (SerializableTokenListCategory category : categorizer.toCategoriesById()) { category.writeTo(out); } - // We're returning a block with N positions just because the Page must have all blocks with the same position count! - int positionCount = categorizer.getCategoryCount() + (seenNull ? 1 : 0); - return blockFactory.newConstantBytesRefBlockWith(out.bytes().toBytesRef(), positionCount); + return out.bytes().toBytesRef(); } catch (IOException e) { throw new RuntimeException(e); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizePackedValuesBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizePackedValuesBlockHash.java new file mode 100644 index 0000000000000..20874cb10ceb8 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizePackedValuesBlockHash.java @@ -0,0 +1,170 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation.blockhash; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.BitArray; +import org.elasticsearch.compute.aggregation.AggregatorMode; +import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.ReleasableIterator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.index.analysis.AnalysisRegistry; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +/** + * BlockHash implementation for {@code Categorize} grouping function as first + * grouping expression, followed by one or more other grouping expressions. + * <p>
    + * For the first grouping (the {@code Categorize} grouping function), a + * {@code CategorizeBlockHash} is used, which outputs integers (category IDs). + * Next, a {@code PackedValuesBlockHash} is used on the category IDs and the + * other groupings (which are not {@code Categorize}s). + */ +public class CategorizePackedValuesBlockHash extends BlockHash { + + private final List specs; + private final AggregatorMode aggregatorMode; + private final Block[] blocks; + private final CategorizeBlockHash categorizeBlockHash; + private final PackedValuesBlockHash packedValuesBlockHash; + + CategorizePackedValuesBlockHash( + List specs, + BlockFactory blockFactory, + AggregatorMode aggregatorMode, + AnalysisRegistry analysisRegistry, + int emitBatchSize + ) { + super(blockFactory); + this.specs = specs; + this.aggregatorMode = aggregatorMode; + blocks = new Block[specs.size()]; + + List delegateSpecs = new ArrayList<>(); + delegateSpecs.add(new GroupSpec(0, ElementType.INT)); + for (int i = 1; i < specs.size(); i++) { + delegateSpecs.add(new GroupSpec(i, specs.get(i).elementType())); + } + + boolean success = false; + try { + categorizeBlockHash = new CategorizeBlockHash(blockFactory, specs.get(0).channel(), aggregatorMode, analysisRegistry); + packedValuesBlockHash = new PackedValuesBlockHash(delegateSpecs, blockFactory, emitBatchSize); + success = true; + } finally { + if (success == false) { + close(); + } + } + } + + @Override + public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { + try (IntBlock categories = getCategories(page)) { + blocks[0] = categories; + for (int i = 1; i < specs.size(); i++) { + blocks[i] = page.getBlock(specs.get(i).channel()); + } + packedValuesBlockHash.add(new Page(blocks), addInput); + } + } + + private IntBlock getCategories(Page page) { + if (aggregatorMode.isInputPartial() == false) { + return categorizeBlockHash.addInitial(page); + } else { + BytesRefBlock stateBlock = page.getBlock(0); + BytesRef stateBytes = stateBlock.getBytesRef(0, new BytesRef()); + try (StreamInput in = new BytesArray(stateBytes).streamInput()) { + BytesRef categorizerState = in.readBytesRef(); + try (IntVector ids = IntVector.readFrom(blockFactory, in)) { + return categorizeBlockHash.recategorize(categorizerState, ids).asBlock(); + } + } catch (IOException e) { + throw new RuntimeException(e); + } + } + } + + @Override + public Block[] getKeys() { + Block[] keys = packedValuesBlockHash.getKeys(); + if (aggregatorMode.isOutputPartial() == false) { + // For final output, the keys are the category regexes. + try ( + BytesRefBlock regexes = (BytesRefBlock) categorizeBlockHash.getKeys()[0]; + BytesRefBlock.Builder builder = blockFactory.newBytesRefBlockBuilder(keys[0].getPositionCount()) + ) { + IntVector idsVector = (IntVector) keys[0].asVector(); + int idsOffset = categorizeBlockHash.seenNull() ? 0 : -1; + BytesRef scratch = new BytesRef(); + for (int i = 0; i < idsVector.getPositionCount(); i++) { + int id = idsVector.getInt(i); + if (id == 0) { + builder.appendNull(); + } else { + builder.appendBytesRef(regexes.getBytesRef(id + idsOffset, scratch)); + } + } + keys[0].close(); + keys[0] = builder.build(); + } + } else { + // For intermediate output, the keys are the delegate PackedValuesBlockHash's + // keys, with the category IDs replaced by the categorizer's internal state + // together with the list of category IDs. + BytesRef state; + // TODO: This BytesStreamOutput is not accounted for by the circuit breaker. Fix that! 
+ try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeBytesRef(categorizeBlockHash.serializeCategorizer()); + ((IntVector) keys[0].asVector()).writeTo(out); + state = out.bytes().toBytesRef(); + } catch (IOException e) { + throw new RuntimeException(e); + } + keys[0].close(); + keys[0] = blockFactory.newConstantBytesRefBlockWith(state, keys[0].getPositionCount()); + } + return keys; + } + + @Override + public IntVector nonEmpty() { + return packedValuesBlockHash.nonEmpty(); + } + + @Override + public BitArray seenGroupIds(BigArrays bigArrays) { + return packedValuesBlockHash.seenGroupIds(bigArrays); + } + + @Override + public final ReleasableIterator lookup(Page page, ByteSizeValue targetBlockSize) { + throw new UnsupportedOperationException(); + } + + @Override + public void close() { + Releasables.close(categorizeBlockHash, packedValuesBlockHash); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/CentroidPointAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/CentroidPointAggregator.java index c66c960dd8a99..47d927fda91b5 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/CentroidPointAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/CentroidPointAggregator.java @@ -32,6 +32,13 @@ * This requires that the planner has planned that points are loaded from the index as doc-values. */ abstract class CentroidPointAggregator { + public static CentroidState initSingle() { + return new CentroidState(); + } + + public static GroupingCentroidState initGrouping(BigArrays bigArrays) { + return new GroupingCentroidState(bigArrays); + } public static void combine(CentroidState current, double xVal, double xDel, double yVal, double yDel, long count) { current.add(xVal, xDel, yVal, yDel, count); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/GeoPointEnvelopeVisitor.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/GeoPointEnvelopeVisitor.java new file mode 100644 index 0000000000000..6bdd028f3d6ee --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/GeoPointEnvelopeVisitor.java @@ -0,0 +1,63 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
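CategorizePackedValuesBlockHash, completed above, is a composition: the text column is first reduced to category ordinals by a CategorizeBlockHash, and the ordinal column plus the remaining group keys are then hashed by a PackedValuesBlockHash. A standalone sketch of the two-stage idea, HashMap-based, with a trivial digit-stripping stand-in for the ML token-list categorizer:

    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    final class TwoStageHash {
        private final Map<String, Integer> categories = new HashMap<>();   // stage 1: Categorize
        private final Map<List<Object>, Integer> packed = new HashMap<>(); // stage 2: PackedValues

        int add(String message, Object otherKey) {
            // Stage 1: category ordinal for the message (CategorizeBlockHash.addInitial).
            int ord = categories.computeIfAbsent(categoryOf(message), k -> categories.size());
            // Stage 2: group id for (ordinal, other keys) (PackedValuesBlockHash.add).
            return packed.computeIfAbsent(List.of(ord, otherKey), k -> packed.size());
        }

        private static String categoryOf(String message) {
            // Stand-in for ML categorization: strip digits to form a pattern.
            return message.replaceAll("\\d+", "*");
        }
    }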
+ */ + +package org.elasticsearch.compute.aggregation.spatial; + +import org.elasticsearch.geometry.Rectangle; +import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor; +import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor.WrapLongitude; + +class GeoPointEnvelopeVisitor extends SpatialEnvelopeVisitor.GeoPointVisitor { + GeoPointEnvelopeVisitor() { + super(WrapLongitude.WRAP); + } + + void reset() { + minY = Double.POSITIVE_INFINITY; + maxY = Double.NEGATIVE_INFINITY; + minNegX = Double.POSITIVE_INFINITY; + maxNegX = Double.NEGATIVE_INFINITY; + minPosX = Double.POSITIVE_INFINITY; + maxPosX = Double.NEGATIVE_INFINITY; + } + + double getMinNegX() { + return minNegX; + } + + double getMinPosX() { + return minPosX; + } + + double getMaxNegX() { + return maxNegX; + } + + double getMaxPosX() { + return maxPosX; + } + + double getMaxY() { + return maxY; + } + + double getMinY() { + return minY; + } + + static Rectangle asRectangle( + double minNegX, + double minPosX, + double maxNegX, + double maxPosX, + double maxY, + double minY, + WrapLongitude wrapLongitude + ) { + return SpatialEnvelopeVisitor.GeoPointVisitor.getResult(minNegX, minPosX, maxNegX, maxPosX, maxY, minY, wrapLongitude); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/PointType.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/PointType.java new file mode 100644 index 0000000000000..5395ca0b85163 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/PointType.java @@ -0,0 +1,107 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation.spatial; + +import org.apache.lucene.geo.GeoEncodingUtils; +import org.apache.lucene.geo.XYEncodingUtils; +import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.geometry.Rectangle; +import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor; +import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor.WrapLongitude; + +import java.util.Optional; + +public enum PointType { + GEO { + @Override + public Optional computeEnvelope(Geometry geo) { + return SpatialEnvelopeVisitor.visitGeo(geo, WrapLongitude.WRAP); + } + + @Override + public double decodeX(int encoded) { + return GeoEncodingUtils.decodeLongitude(encoded); + } + + @Override + public double decodeY(int encoded) { + return GeoEncodingUtils.decodeLatitude(encoded); + } + + @Override + public int encodeX(double decoded) { + return GeoEncodingUtils.encodeLongitude(decoded); + } + + @Override + public int encodeY(double decoded) { + return GeoEncodingUtils.encodeLatitude(decoded); + } + + // Geo encodes the longitude in the lower 32 bits and the latitude in the upper 32 bits. 
+ @Override + public int extractX(long encoded) { + return SpatialAggregationUtils.extractSecond(encoded); + } + + @Override + public int extractY(long encoded) { + return SpatialAggregationUtils.extractFirst(encoded); + } + }, + CARTESIAN { + @Override + public Optional computeEnvelope(Geometry geo) { + return SpatialEnvelopeVisitor.visitCartesian(geo); + } + + @Override + public double decodeX(int encoded) { + return XYEncodingUtils.decode(encoded); + } + + @Override + public double decodeY(int encoded) { + return XYEncodingUtils.decode(encoded); + } + + @Override + public int encodeX(double decoded) { + return XYEncodingUtils.encode((float) decoded); + } + + @Override + public int encodeY(double decoded) { + return XYEncodingUtils.encode((float) decoded); + } + + @Override + public int extractX(long encoded) { + return SpatialAggregationUtils.extractFirst(encoded); + } + + @Override + public int extractY(long encoded) { + return SpatialAggregationUtils.extractSecond(encoded); + } + }; + + public abstract Optional computeEnvelope(Geometry geo); + + public abstract double decodeX(int encoded); + + public abstract double decodeY(int encoded); + + public abstract int encodeX(double decoded); + + public abstract int encodeY(double decoded); + + public abstract int extractX(long encoded); + + public abstract int extractY(long encoded); +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialAggregationUtils.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialAggregationUtils.java new file mode 100644 index 0000000000000..6b29b20601dae --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialAggregationUtils.java @@ -0,0 +1,88 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.aggregation.spatial; + +import org.apache.lucene.geo.GeoEncodingUtils; +import org.apache.lucene.geo.XYEncodingUtils; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.geometry.Point; +import org.elasticsearch.geometry.Rectangle; +import org.elasticsearch.geometry.utils.GeometryValidator; +import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor.WrapLongitude; +import org.elasticsearch.geometry.utils.WellKnownBinary; + +class SpatialAggregationUtils { + private SpatialAggregationUtils() { /* Utility class */ } + + public static Geometry decode(BytesRef wkb) { + return WellKnownBinary.fromWKB(GeometryValidator.NOOP, false /* coerce */, wkb.bytes, wkb.offset, wkb.length); + } + + public static Point decodePoint(BytesRef wkb) { + return (Point) decode(wkb); + } + + public static double decodeX(long encoded) { + return XYEncodingUtils.decode(extractFirst(encoded)); + } + + public static int extractFirst(long encoded) { + return (int) (encoded >>> 32); + } + + public static double decodeY(long encoded) { + return XYEncodingUtils.decode(extractSecond(encoded)); + } + + public static int extractSecond(long encoded) { + return (int) (encoded & 0xFFFFFFFFL); + } + + public static double decodeLongitude(long encoded) { + return GeoEncodingUtils.decodeLongitude((int) (encoded & 0xFFFFFFFFL)); + } + + public static double decodeLatitude(long encoded) { + return GeoEncodingUtils.decodeLatitude((int) (encoded >>> 32)); + } + + public static int encodeNegativeLongitude(double d) { + return Double.isFinite(d) ? GeoEncodingUtils.encodeLongitude(d) : DEFAULT_NEG; + } + + public static int encodePositiveLongitude(double d) { + return Double.isFinite(d) ? GeoEncodingUtils.encodeLongitude(d) : DEFAULT_POS; + } + + public static Rectangle asRectangle(int minNegX, int minPosX, int maxNegX, int maxPosX, int maxY, int minY) { + assert minNegX <= 0 == maxNegX <= 0; + assert minPosX >= 0 == maxPosX >= 0; + return GeoPointEnvelopeVisitor.asRectangle( + minNegX <= 0 ? decodeLongitude(minNegX) : Double.POSITIVE_INFINITY, + minPosX >= 0 ? decodeLongitude(minPosX) : Double.POSITIVE_INFINITY, + maxNegX <= 0 ? decodeLongitude(maxNegX) : Double.NEGATIVE_INFINITY, + maxPosX >= 0 ? decodeLongitude(maxPosX) : Double.NEGATIVE_INFINITY, + GeoEncodingUtils.decodeLatitude(maxY), + GeoEncodingUtils.decodeLatitude(minY), + WrapLongitude.WRAP + ); + } + + public static int maxNeg(int a, int b) { + return a <= 0 && b <= 0 ? Math.max(a, b) : Math.min(a, b); + } + + public static int minPos(int a, int b) { + return a >= 0 && b >= 0 ? Math.min(a, b) : Math.max(a, b); + } + + // The default values are intentionally non-negative/non-positive, so we can mark unassigned values. 
+ public static final int DEFAULT_POS = -1; + public static final int DEFAULT_NEG = 1; +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesAggregator.java index 0bafb6f8112de..891c22b71c7e9 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesAggregator.java @@ -7,12 +7,13 @@ package org.elasticsearch.compute.aggregation.spatial; -import org.apache.lucene.geo.XYEncodingUtils; -import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.ann.Aggregator; import org.elasticsearch.compute.ann.GroupingAggregator; import org.elasticsearch.compute.ann.IntermediateState; +import static org.elasticsearch.compute.aggregation.spatial.SpatialAggregationUtils.decodeX; +import static org.elasticsearch.compute.aggregation.spatial.SpatialAggregationUtils.decodeY; + /** * This aggregator calculates the centroid of a set of cartesian points. * It is assumes that the cartesian points are encoded as longs. @@ -28,15 +29,6 @@ ) @GroupingAggregator class SpatialCentroidCartesianPointDocValuesAggregator extends CentroidPointAggregator { - - public static CentroidState initSingle() { - return new CentroidState(); - } - - public static GroupingCentroidState initGrouping(BigArrays bigArrays) { - return new GroupingCentroidState(bigArrays); - } - public static void combine(CentroidState current, long v) { current.add(decodeX(v), decodeY(v)); } @@ -44,12 +36,4 @@ public static void combine(CentroidState current, long v) { public static void combine(GroupingCentroidState current, int groupId, long encoded) { current.add(decodeX(encoded), 0d, decodeY(encoded), 0d, 1, groupId); } - - private static double decodeX(long encoded) { - return XYEncodingUtils.decode((int) (encoded >>> 32)); - } - - private static double decodeY(long encoded) { - return XYEncodingUtils.decode((int) (encoded & 0xFFFFFFFFL)); - } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesAggregator.java index 5673892be4bf0..700721e3ea9d4 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesAggregator.java @@ -8,13 +8,10 @@ package org.elasticsearch.compute.aggregation.spatial; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.ann.Aggregator; import org.elasticsearch.compute.ann.GroupingAggregator; import org.elasticsearch.compute.ann.IntermediateState; import org.elasticsearch.geometry.Point; -import org.elasticsearch.geometry.utils.GeometryValidator; -import org.elasticsearch.geometry.utils.WellKnownBinary; /** * This aggregator calculates the centroid of a set of cartesian points. 
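The DEFAULT_POS/DEFAULT_NEG sentinels above deliberately carry the wrong sign for the band they initialize: the positive-longitude bounds start at -1 and the negative ones at 1, so an untouched band is recognizable, and the maxNeg/minPos helpers prefer any genuinely signed value over the sentinel. A self-contained sketch of a merge built on those rules; this is a plausible reading of the state classes, which this diff does not show.

    final class ExtentMerge {
        static final int DEFAULT_POS = -1;
        static final int DEFAULT_NEG = 1;

        static int maxNeg(int a, int b) { return a <= 0 && b <= 0 ? Math.max(a, b) : Math.min(a, b); }
        static int minPos(int a, int b) { return a >= 0 && b >= 0 ? Math.min(a, b) : Math.max(a, b); }

        int minNegX = DEFAULT_NEG, maxNegX = DEFAULT_NEG; // negative-longitude band, unset
        int minPosX = DEFAULT_POS, maxPosX = DEFAULT_POS; // positive-longitude band, unset
        int minY = Integer.MAX_VALUE, maxY = Integer.MIN_VALUE;

        void combineIntermediate(int minNegX, int minPosX, int maxNegX, int maxPosX, int maxY, int minY) {
            this.minNegX = Math.min(this.minNegX, minNegX); // sentinel 1 loses to any negative
            this.minPosX = minPos(this.minPosX, minPosX);   // sentinel -1 loses to any positive
            this.maxNegX = maxNeg(this.maxNegX, maxNegX);
            this.maxPosX = Math.max(this.maxPosX, maxPosX);
            this.maxY = Math.max(this.maxY, maxY);
            this.minY = Math.min(this.minY, minY);
        }
    }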
@@ -33,26 +30,13 @@ ) @GroupingAggregator class SpatialCentroidCartesianPointSourceValuesAggregator extends CentroidPointAggregator { - - public static CentroidState initSingle() { - return new CentroidState(); - } - - public static GroupingCentroidState initGrouping(BigArrays bigArrays) { - return new GroupingCentroidState(bigArrays); - } - public static void combine(CentroidState current, BytesRef wkb) { - Point point = decode(wkb); + Point point = SpatialAggregationUtils.decodePoint(wkb); current.add(point.getX(), point.getY()); } public static void combine(GroupingCentroidState current, int groupId, BytesRef wkb) { - Point point = decode(wkb); + Point point = SpatialAggregationUtils.decodePoint(wkb); current.add(point.getX(), 0d, point.getY(), 0d, 1, groupId); } - - private static Point decode(BytesRef wkb) { - return (Point) WellKnownBinary.fromWKB(GeometryValidator.NOOP, false, wkb.bytes, wkb.offset, wkb.length); - } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesAggregator.java index ee5ab0e292547..431e25a03779e 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesAggregator.java @@ -7,12 +7,13 @@ package org.elasticsearch.compute.aggregation.spatial; -import org.apache.lucene.geo.GeoEncodingUtils; -import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.ann.Aggregator; import org.elasticsearch.compute.ann.GroupingAggregator; import org.elasticsearch.compute.ann.IntermediateState; +import static org.elasticsearch.compute.aggregation.spatial.SpatialAggregationUtils.decodeLatitude; +import static org.elasticsearch.compute.aggregation.spatial.SpatialAggregationUtils.decodeLongitude; + /** * This aggregator calculates the centroid of a set of geo points. It is assumes that the geo points are encoded as longs. * This requires that the planner has planned that points are loaded from the index as doc-values. 
@@ -27,28 +28,11 @@ ) @GroupingAggregator class SpatialCentroidGeoPointDocValuesAggregator extends CentroidPointAggregator { - - public static CentroidState initSingle() { - return new CentroidState(); - } - - public static GroupingCentroidState initGrouping(BigArrays bigArrays) { - return new GroupingCentroidState(bigArrays); - } - public static void combine(CentroidState current, long v) { - current.add(decodeX(v), decodeY(v)); + current.add(decodeLongitude(v), decodeLatitude(v)); } public static void combine(GroupingCentroidState current, int groupId, long encoded) { - current.add(decodeX(encoded), 0d, decodeY(encoded), 0d, 1, groupId); - } - - private static double decodeX(long encoded) { - return GeoEncodingUtils.decodeLongitude((int) (encoded & 0xFFFFFFFFL)); - } - - private static double decodeY(long encoded) { - return GeoEncodingUtils.decodeLatitude((int) (encoded >>> 32)); + current.add(decodeLongitude(encoded), 0d, decodeLatitude(encoded), 0d, 1, groupId); } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesAggregator.java index caf55dcc2f4e1..90563b33b8abb 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesAggregator.java @@ -8,13 +8,10 @@ package org.elasticsearch.compute.aggregation.spatial; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.ann.Aggregator; import org.elasticsearch.compute.ann.GroupingAggregator; import org.elasticsearch.compute.ann.IntermediateState; import org.elasticsearch.geometry.Point; -import org.elasticsearch.geometry.utils.GeometryValidator; -import org.elasticsearch.geometry.utils.WellKnownBinary; /** * This aggregator calculates the centroid of a set of geo points. 
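The centroid hunks above make the long layouts explicit: geo doc-values pack latitude into the high 32 bits and longitude into the low 32, while cartesian doc-values pack x high and y low, which is why PointType.GEO maps extractX to extractSecond while CARTESIAN maps it to extractFirst. A small worked example of the packing arithmetic, with arbitrary values:

    public class PackingDemo {
        static long pack(int hi, int lo) {
            return ((long) hi << 32) | (lo & 0xFFFFFFFFL);
        }

        public static void main(String[] args) {
            int encLat = 123, encLon = -456; // already-encoded 32-bit coordinates
            long geo = pack(encLat, encLon); // GEO: latitude high, longitude low
            assert (int) (geo >>> 32) == encLat;        // extractFirst -> latitude
            assert (int) (geo & 0xFFFFFFFFL) == encLon; // extractSecond -> longitude

            int encX = 7, encY = 9;
            long xy = pack(encX, encY);      // CARTESIAN: x high, y low
            assert (int) (xy >>> 32) == encX;
            assert (int) (xy & 0xFFFFFFFFL) == encY;
        }
    }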
@@ -33,26 +30,13 @@ ) @GroupingAggregator class SpatialCentroidGeoPointSourceValuesAggregator extends CentroidPointAggregator { - - public static CentroidState initSingle() { - return new CentroidState(); - } - - public static GroupingCentroidState initGrouping(BigArrays bigArrays) { - return new GroupingCentroidState(bigArrays); - } - public static void combine(CentroidState current, BytesRef wkb) { - Point point = decode(wkb); + Point point = SpatialAggregationUtils.decodePoint(wkb); current.add(point.getX(), point.getY()); } public static void combine(GroupingCentroidState current, int groupId, BytesRef wkb) { - Point point = decode(wkb); + Point point = SpatialAggregationUtils.decodePoint(wkb); current.add(point.getX(), 0d, point.getY(), 0d, 1, groupId); } - - private static Point decode(BytesRef wkb) { - return (Point) WellKnownBinary.fromWKB(GeometryValidator.NOOP, false, wkb.bytes, wkb.offset, wkb.length); - } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentAggregator.java new file mode 100644 index 0000000000000..91e0f098d795e --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentAggregator.java @@ -0,0 +1,36 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation.spatial; + +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.operator.DriverContext; + +// A bit of abuse of notation here, since we're extending this class to "inherit" its static methods. +// Unfortunately, this is the way it has to be done, since the generated code invokes these methods statically. +abstract class SpatialExtentAggregator { + public static void combineIntermediate(SpatialExtentState current, int minX, int maxX, int maxY, int minY) { + current.add(minX, maxX, maxY, minY); + } + + public static void combineIntermediate(SpatialExtentGroupingState current, int groupId, int minX, int maxX, int maxY, int minY) { + current.add(groupId, minX, maxX, maxY, minY); + } + + public static Block evaluateFinal(SpatialExtentState state, DriverContext driverContext) { + return state.toBlock(driverContext); + } + + public static Block evaluateFinal(SpatialExtentGroupingState state, IntVector selected, DriverContext driverContext) { + return state.toBlock(selected, driverContext); + } + + public static void combineStates(SpatialExtentGroupingState current, int groupId, SpatialExtentGroupingState inState, int inPosition) { + current.add(groupId, inState, inPosition); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesAggregator.java new file mode 100644 index 0000000000000..f64949b77707c --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesAggregator.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation.spatial; + +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; + +/** + * Computes the extent of a set of cartesian points. It is assumed the points are encoded as longs. + * This requires that the planner has planned that points are loaded from the index as doc-values. + */ +@Aggregator( + { + @IntermediateState(name = "minX", type = "INT"), + @IntermediateState(name = "maxX", type = "INT"), + @IntermediateState(name = "maxY", type = "INT"), + @IntermediateState(name = "minY", type = "INT") } +) +@GroupingAggregator +class SpatialExtentCartesianPointDocValuesAggregator extends SpatialExtentAggregator { + public static SpatialExtentState initSingle() { + return new SpatialExtentState(PointType.CARTESIAN); + } + + public static SpatialExtentGroupingState initGrouping() { + return new SpatialExtentGroupingState(PointType.CARTESIAN); + } + + public static void combine(SpatialExtentState current, long v) { + current.add(v); + } + + public static void combine(SpatialExtentGroupingState current, int groupId, long v) { + current.add(groupId, v); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesAggregator.java new file mode 100644 index 0000000000000..3488af4525dcb --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesAggregator.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation.spatial; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; + +/** + * Computes the extent of a set of cartesian points. It is assumed that the cartesian points are encoded as WKB BytesRef. + * This requires that the planner has NOT planned that points are loaded from the index as doc-values, but from source instead. + * This is also used for final aggregations and aggregations in the coordinator node, + * even if the local node partial aggregation is done with {@link SpatialExtentCartesianPointDocValuesAggregator}. 
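Reviewer note: because the doc-values and source-values variants emit the same four-int intermediate state, the coordinator can merge partials no matter how each data node read the points. A standalone sketch of that merge (record name and sample values are illustrative, not from this PR):

```java
// Component-wise min/max over encoded ints, which is what
// SpatialExtentAggregator.combineIntermediate does via SpatialExtentState.add.
record Extent(int minX, int maxX, int maxY, int minY) {
    Extent merge(Extent other) {
        return new Extent(
            Math.min(minX, other.minX),
            Math.max(maxX, other.maxX),
            Math.max(maxY, other.maxY),
            Math.min(minY, other.minY)
        );
    }

    public static void main(String[] args) {
        Extent a = new Extent(-10, 5, 8, -2);  // partial result from node A
        Extent b = new Extent(-3, 12, 3, -7);  // partial result from node B
        System.out.println(a.merge(b));        // Extent[minX=-10, maxX=12, maxY=8, minY=-7]
    }
}
```

Staying in encoded int space is safe because the Lucene coordinate encodings are order-preserving, so min/max on encoded values agrees with min/max on the decoded doubles.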
+ */ +@Aggregator( + { + @IntermediateState(name = "minX", type = "INT"), + @IntermediateState(name = "maxX", type = "INT"), + @IntermediateState(name = "maxY", type = "INT"), + @IntermediateState(name = "minY", type = "INT") } +) +@GroupingAggregator +class SpatialExtentCartesianPointSourceValuesAggregator extends SpatialExtentAggregator { + public static SpatialExtentState initSingle() { + return new SpatialExtentState(PointType.CARTESIAN); + } + + public static SpatialExtentGroupingState initGrouping() { + return new SpatialExtentGroupingState(PointType.CARTESIAN); + } + + public static void combine(SpatialExtentState current, BytesRef bytes) { + current.add(SpatialAggregationUtils.decode(bytes)); + } + + public static void combine(SpatialExtentGroupingState current, int groupId, BytesRef bytes) { + current.add(groupId, SpatialAggregationUtils.decode(bytes)); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeAggregator.java new file mode 100644 index 0000000000000..6d50d27aa5a2d --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeAggregator.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation.spatial; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; + +/** + * Computes the extent of a set of cartesian shapes. It is assumed that the cartesian shapes are encoded as WKB BytesRef. + * We do not currently support reading shape values or extents from doc values. + */ +@Aggregator( + { + @IntermediateState(name = "minX", type = "INT"), + @IntermediateState(name = "maxX", type = "INT"), + @IntermediateState(name = "maxY", type = "INT"), + @IntermediateState(name = "minY", type = "INT") } +) +@GroupingAggregator +class SpatialExtentCartesianShapeAggregator extends SpatialExtentAggregator { + public static SpatialExtentState initSingle() { + return new SpatialExtentState(PointType.CARTESIAN); + } + + public static SpatialExtentGroupingState initGrouping() { + return new SpatialExtentGroupingState(PointType.CARTESIAN); + } + + public static void combine(SpatialExtentState current, BytesRef bytes) { + current.add(SpatialAggregationUtils.decode(bytes)); + } + + public static void combine(SpatialExtentGroupingState current, int groupId, BytesRef bytes) { + current.add(groupId, SpatialAggregationUtils.decode(bytes)); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesAggregator.java new file mode 100644 index 0000000000000..b9b8bf65e116b --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesAggregator.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation.spatial; + +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; + +/** + * Computes the extent of a set of geo points. It is assumed the points are encoded as longs. + * This requires that the planner has planned that points are loaded from the index as doc-values. + */ +@Aggregator( + { + @IntermediateState(name = "minNegX", type = "INT"), + @IntermediateState(name = "minPosX", type = "INT"), + @IntermediateState(name = "maxNegX", type = "INT"), + @IntermediateState(name = "maxPosX", type = "INT"), + @IntermediateState(name = "maxY", type = "INT"), + @IntermediateState(name = "minY", type = "INT") } +) +@GroupingAggregator +class SpatialExtentGeoPointDocValuesAggregator extends SpatialExtentLongitudeWrappingAggregator { + // TODO support non-longitude wrapped geo shapes. + public static SpatialExtentStateWrappedLongitudeState initSingle() { + return new SpatialExtentStateWrappedLongitudeState(); + } + + public static SpatialExtentGroupingStateWrappedLongitudeState initGrouping() { + return new SpatialExtentGroupingStateWrappedLongitudeState(); + } + + public static void combine(SpatialExtentStateWrappedLongitudeState current, long encoded) { + current.add(encoded); + } + + public static void combine(SpatialExtentGroupingStateWrappedLongitudeState current, int groupId, long encoded) { + current.add(groupId, encoded); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesAggregator.java new file mode 100644 index 0000000000000..36a4e359f23fc --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesAggregator.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation.spatial; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; + +/** + * Computes the extent of a set of geo points. It is assumed that the geo points are encoded as WKB BytesRef. + * This requires that the planner has NOT planned that points are loaded from the index as doc-values, but from source instead. + * This is also used for final aggregations and aggregations in the coordinator node, + * even if the local node partial aggregation is done with {@link SpatialExtentGeoPointDocValuesAggregator}. 
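Reviewer note: the geo variants carry six intermediate ints rather than four because longitudes may wrap at the antimeridian: negative and positive longitude ranges are tracked separately and only reconciled when the final rectangle is built. A hypothetical, self-contained illustration of the arithmetic (variable names are mine; SpatialAggregationUtils.asRectangle, which presumably makes this comparison on the encoded values, is not shown in this excerpt):

```java
class DatelineSketch {
    public static void main(String[] args) {
        // Two points at longitudes 170 and -170.
        double minNegX = -170, maxNegX = -170; // range of negative longitudes seen
        double minPosX = 170, maxPosX = 170;   // range of positive longitudes seen

        double unwrappedWidth = maxPosX - minNegX;       // 340 degrees, spans the prime meridian
        double wrappedWidth = (maxNegX + 360) - minPosX; // 20 degrees, crosses the antimeridian

        // Preferring the narrower candidate yields a 20-degree extent around the
        // antimeridian instead of one covering nearly the whole globe.
        boolean crossesDateline = wrappedWidth < unwrappedWidth;
        System.out.println(crossesDateline ? "wrapped extent" : "unwrapped extent"); // wrapped extent
    }
}
```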
+ */ +@Aggregator( + { + @IntermediateState(name = "minNegX", type = "INT"), + @IntermediateState(name = "minPosX", type = "INT"), + @IntermediateState(name = "maxNegX", type = "INT"), + @IntermediateState(name = "maxPosX", type = "INT"), + @IntermediateState(name = "maxY", type = "INT"), + @IntermediateState(name = "minY", type = "INT") } +) +@GroupingAggregator +class SpatialExtentGeoPointSourceValuesAggregator extends SpatialExtentLongitudeWrappingAggregator { + // TODO support non-longitude wrapped geo shapes. + public static SpatialExtentStateWrappedLongitudeState initSingle() { + return new SpatialExtentStateWrappedLongitudeState(); + } + + public static SpatialExtentGroupingStateWrappedLongitudeState initGrouping() { + return new SpatialExtentGroupingStateWrappedLongitudeState(); + } + + public static void combine(SpatialExtentStateWrappedLongitudeState current, BytesRef bytes) { + current.add(SpatialAggregationUtils.decode(bytes)); + } + + public static void combine(SpatialExtentGroupingStateWrappedLongitudeState current, int groupId, BytesRef bytes) { + current.add(groupId, SpatialAggregationUtils.decode(bytes)); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeAggregator.java new file mode 100644 index 0000000000000..3d1b9b6300c9d --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeAggregator.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation.spatial; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; + +/** + * Computes the extent of a set of geo shapes. It is assumed that the geo shapes are encoded as WKB BytesRef. + * We do not currently support reading shape values or extents from doc values. + */ +@Aggregator( + { + @IntermediateState(name = "minNegX", type = "INT"), + @IntermediateState(name = "minPosX", type = "INT"), + @IntermediateState(name = "maxNegX", type = "INT"), + @IntermediateState(name = "maxPosX", type = "INT"), + @IntermediateState(name = "maxY", type = "INT"), + @IntermediateState(name = "minY", type = "INT") } +) +@GroupingAggregator +class SpatialExtentGeoShapeAggregator extends SpatialExtentLongitudeWrappingAggregator { + // TODO support non-longitude wrapped geo shapes. 
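Reviewer note: for shapes there are no doc-value longs to consume, so only the WKB path exists; the wrapped-longitude states further down compute the envelope by visiting the decoded geometry. The sketch below mirrors the add(Geometry) body shown later in this diff (usable only inside the same package, since these helpers are package-private):

```java
// Getter and helper names follow the calls visible in
// SpatialExtentStateWrappedLongitudeState below; this is effectively what
// its add(Geometry) method does.
static void addShape(SpatialExtentStateWrappedLongitudeState state, BytesRef wkb) {
    GeoPointEnvelopeVisitor pointVisitor = new GeoPointEnvelopeVisitor();
    pointVisitor.reset();
    Geometry geometry = SpatialAggregationUtils.decode(wkb); // WKB BytesRef from source
    if (geometry.visit(new SpatialEnvelopeVisitor(pointVisitor))) {
        state.add(
            SpatialAggregationUtils.encodeNegativeLongitude(pointVisitor.getMinNegX()),
            SpatialAggregationUtils.encodePositiveLongitude(pointVisitor.getMinPosX()),
            SpatialAggregationUtils.encodeNegativeLongitude(pointVisitor.getMaxNegX()),
            SpatialAggregationUtils.encodePositiveLongitude(pointVisitor.getMaxPosX()),
            PointType.GEO.encodeY(pointVisitor.getMaxY()),
            PointType.GEO.encodeY(pointVisitor.getMinY())
        );
    }
}
```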
+ public static SpatialExtentStateWrappedLongitudeState initSingle() { + return new SpatialExtentStateWrappedLongitudeState(); + } + + public static SpatialExtentGroupingStateWrappedLongitudeState initGrouping() { + return new SpatialExtentGroupingStateWrappedLongitudeState(); + } + + public static void combine(SpatialExtentStateWrappedLongitudeState current, BytesRef bytes) { + current.add(SpatialAggregationUtils.decode(bytes)); + } + + public static void combine(SpatialExtentGroupingStateWrappedLongitudeState current, int groupId, BytesRef bytes) { + current.add(groupId, SpatialAggregationUtils.decode(bytes)); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGroupingState.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGroupingState.java new file mode 100644 index 0000000000000..9ce0ccdda0ff5 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGroupingState.java @@ -0,0 +1,154 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation.spatial; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.IntArray; +import org.elasticsearch.compute.aggregation.AbstractArrayState; +import org.elasticsearch.compute.aggregation.SeenGroupIds; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.geometry.Rectangle; +import org.elasticsearch.geometry.utils.WellKnownBinary; + +import java.nio.ByteOrder; + +final class SpatialExtentGroupingState extends AbstractArrayState { + private final PointType pointType; + private IntArray minXs; + private IntArray maxXs; + private IntArray maxYs; + private IntArray minYs; + + SpatialExtentGroupingState(PointType pointType) { + this(pointType, BigArrays.NON_RECYCLING_INSTANCE); + } + + SpatialExtentGroupingState(PointType pointType, BigArrays bigArrays) { + super(bigArrays); + this.pointType = pointType; + this.minXs = bigArrays.newIntArray(0, false); + this.maxXs = bigArrays.newIntArray(0, false); + this.maxYs = bigArrays.newIntArray(0, false); + this.minYs = bigArrays.newIntArray(0, false); + enableGroupIdTracking(new SeenGroupIds.Empty()); + } + + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + assert blocks.length >= offset; + try ( + var minXsBuilder = driverContext.blockFactory().newIntBlockBuilder(selected.getPositionCount()); + var maxXsBuilder = driverContext.blockFactory().newIntBlockBuilder(selected.getPositionCount()); + var maxYsBuilder = driverContext.blockFactory().newIntBlockBuilder(selected.getPositionCount()); + var minYsBuilder = driverContext.blockFactory().newIntBlockBuilder(selected.getPositionCount()); + ) { + for (int i = 0; i < selected.getPositionCount(); i++) { + int group = selected.getInt(i); + assert hasValue(group); + minXsBuilder.appendInt(minXs.get(group)); + maxXsBuilder.appendInt(maxXs.get(group)); + maxYsBuilder.appendInt(maxYs.get(group)); + 
minYsBuilder.appendInt(minYs.get(group)); + } + blocks[offset + 0] = minXsBuilder.build(); + blocks[offset + 1] = maxXsBuilder.build(); + blocks[offset + 2] = maxYsBuilder.build(); + blocks[offset + 3] = minYsBuilder.build(); + } + } + + public void add(int groupId, Geometry geometry) { + ensureCapacity(groupId); + pointType.computeEnvelope(geometry) + .ifPresent( + r -> add( + groupId, + pointType.encodeX(r.getMinX()), + pointType.encodeX(r.getMaxX()), + pointType.encodeY(r.getMaxY()), + pointType.encodeY(r.getMinY()) + ) + ); + } + + public void add(int groupId, long encoded) { + int x = pointType.extractX(encoded); + int y = pointType.extractY(encoded); + add(groupId, x, x, y, y); + } + + public void add(int groupId, int minX, int maxX, int maxY, int minY) { + ensureCapacity(groupId); + if (hasValue(groupId)) { + minXs.set(groupId, Math.min(minXs.get(groupId), minX)); + maxXs.set(groupId, Math.max(maxXs.get(groupId), maxX)); + maxYs.set(groupId, Math.max(maxYs.get(groupId), maxY)); + minYs.set(groupId, Math.min(minYs.get(groupId), minY)); + } else { + minXs.set(groupId, minX); + maxXs.set(groupId, maxX); + maxYs.set(groupId, maxY); + minYs.set(groupId, minY); + } + trackGroupId(groupId); + } + + private void ensureCapacity(int groupId) { + long requiredSize = groupId + 1; + if (minXs.size() < requiredSize) { + assert minXs.size() == maxXs.size() && minXs.size() == maxYs.size() && minXs.size() == minYs.size(); + minXs = bigArrays.grow(minXs, requiredSize); + maxXs = bigArrays.grow(maxXs, requiredSize); + maxYs = bigArrays.grow(maxYs, requiredSize); + minYs = bigArrays.grow(minYs, requiredSize); + } + } + + public Block toBlock(IntVector selected, DriverContext driverContext) { + try (var builder = driverContext.blockFactory().newBytesRefBlockBuilder(selected.getPositionCount())) { + for (int i = 0; i < selected.getPositionCount(); i++) { + int si = selected.getInt(i); + if (hasValue(si)) { + builder.appendBytesRef( + new BytesRef( + WellKnownBinary.toWKB( + new Rectangle( + pointType.decodeX(minXs.get(si)), + pointType.decodeX(maxXs.get(si)), + pointType.decodeY(maxYs.get(si)), + pointType.decodeY(minYs.get(si)) + ), + ByteOrder.LITTLE_ENDIAN + ) + ) + ); + } else { + builder.appendNull(); + } + } + return builder.build(); + } + } + + public void add(int groupId, SpatialExtentGroupingState inState, int inPosition) { + ensureCapacity(groupId); + if (inState.hasValue(inPosition)) { + add( + groupId, + inState.minXs.get(inPosition), + inState.maxXs.get(inPosition), + inState.maxYs.get(inPosition), + inState.minYs.get(inPosition) + ); + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGroupingStateWrappedLongitudeState.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGroupingStateWrappedLongitudeState.java new file mode 100644 index 0000000000000..3dd7a6d4acde2 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGroupingStateWrappedLongitudeState.java @@ -0,0 +1,182 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
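Reviewer note: both grouping states lean on the same AbstractArrayState bookkeeping. The parallel IntArrays start at size zero and are created with clearOnResize=false, so a slot holds garbage until its group is first written; that is why reads are guarded by hasValue(groupId) and every write ends in trackGroupId(groupId). Condensed sketch (one array standing in for the four or six that are always grown together; only meaningful inside an AbstractArrayState subclass):

```java
// Grow-and-track pattern used by the grouping states, simplified to min-only.
IntArray values = bigArrays.newIntArray(0, false); // uncleared: garbage until written

void add(int groupId, int v) {
    values = bigArrays.grow(values, groupId + 1);   // ensureCapacity
    if (hasValue(groupId)) {                        // slot is meaningful only once seen
        values.set(groupId, Math.min(values.get(groupId), v));
    } else {
        values.set(groupId, v);                     // first value initializes the slot
    }
    trackGroupId(groupId);                          // AbstractArrayState bookkeeping
}
```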
+ */ + +package org.elasticsearch.compute.aggregation.spatial; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.IntArray; +import org.elasticsearch.compute.aggregation.AbstractArrayState; +import org.elasticsearch.compute.aggregation.GroupingAggregatorState; +import org.elasticsearch.compute.aggregation.SeenGroupIds; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor; +import org.elasticsearch.geometry.utils.WellKnownBinary; + +import java.nio.ByteOrder; + +final class SpatialExtentGroupingStateWrappedLongitudeState extends AbstractArrayState implements GroupingAggregatorState { + // Only geo points support longitude wrapping. + private static final PointType POINT_TYPE = PointType.GEO; + private IntArray minNegXs; + private IntArray minPosXs; + private IntArray maxNegXs; + private IntArray maxPosXs; + private IntArray maxYs; + private IntArray minYs; + + private GeoPointEnvelopeVisitor geoPointVisitor = new GeoPointEnvelopeVisitor(); + + SpatialExtentGroupingStateWrappedLongitudeState() { + this(BigArrays.NON_RECYCLING_INSTANCE); + } + + SpatialExtentGroupingStateWrappedLongitudeState(BigArrays bigArrays) { + super(bigArrays); + this.minNegXs = bigArrays.newIntArray(0, false); + this.minPosXs = bigArrays.newIntArray(0, false); + this.maxNegXs = bigArrays.newIntArray(0, false); + this.maxPosXs = bigArrays.newIntArray(0, false); + this.maxYs = bigArrays.newIntArray(0, false); + this.minYs = bigArrays.newIntArray(0, false); + enableGroupIdTracking(new SeenGroupIds.Empty()); + } + + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + assert blocks.length >= offset; + try ( + var minNegXsBuilder = driverContext.blockFactory().newIntBlockBuilder(selected.getPositionCount()); + var minPosXsBuilder = driverContext.blockFactory().newIntBlockBuilder(selected.getPositionCount()); + var maxNegXsBuilder = driverContext.blockFactory().newIntBlockBuilder(selected.getPositionCount()); + var maxPosXsBuilder = driverContext.blockFactory().newIntBlockBuilder(selected.getPositionCount()); + var maxYsBuilder = driverContext.blockFactory().newIntBlockBuilder(selected.getPositionCount()); + var minYsBuilder = driverContext.blockFactory().newIntBlockBuilder(selected.getPositionCount()); + ) { + for (int i = 0; i < selected.getPositionCount(); i++) { + int group = selected.getInt(i); + assert hasValue(group); + assert minNegXs.get(group) <= 0 == maxNegXs.get(group) <= 0; + assert minPosXs.get(group) >= 0 == maxPosXs.get(group) >= 0; + minNegXsBuilder.appendInt(minNegXs.get(group)); + minPosXsBuilder.appendInt(minPosXs.get(group)); + maxNegXsBuilder.appendInt(maxNegXs.get(group)); + maxPosXsBuilder.appendInt(maxPosXs.get(group)); + maxYsBuilder.appendInt(maxYs.get(group)); + minYsBuilder.appendInt(minYs.get(group)); + } + blocks[offset + 0] = minNegXsBuilder.build(); + blocks[offset + 1] = minPosXsBuilder.build(); + blocks[offset + 2] = maxNegXsBuilder.build(); + blocks[offset + 3] = maxPosXsBuilder.build(); + blocks[offset + 4] = maxYsBuilder.build(); + blocks[offset + 5] = minYsBuilder.build(); + } + } + + public void add(int groupId, Geometry geo) { + ensureCapacity(groupId); + geoPointVisitor.reset(); + if (geo.visit(new 
SpatialEnvelopeVisitor(geoPointVisitor))) { + add( + groupId, + SpatialAggregationUtils.encodeNegativeLongitude(geoPointVisitor.getMinNegX()), + SpatialAggregationUtils.encodePositiveLongitude(geoPointVisitor.getMinPosX()), + SpatialAggregationUtils.encodeNegativeLongitude(geoPointVisitor.getMaxNegX()), + SpatialAggregationUtils.encodePositiveLongitude(geoPointVisitor.getMaxPosX()), + POINT_TYPE.encodeY(geoPointVisitor.getMaxY()), + POINT_TYPE.encodeY(geoPointVisitor.getMinY()) + ); + } + } + + public void add(int groupId, SpatialExtentGroupingStateWrappedLongitudeState inState, int inPosition) { + ensureCapacity(groupId); + if (inState.hasValue(inPosition)) { + add( + groupId, + inState.minNegXs.get(inPosition), + inState.minPosXs.get(inPosition), + inState.maxNegXs.get(inPosition), + inState.maxPosXs.get(inPosition), + inState.maxYs.get(inPosition), + inState.minYs.get(inPosition) + ); + } + } + + public void add(int groupId, long encoded) { + int x = POINT_TYPE.extractX(encoded); + int y = POINT_TYPE.extractY(encoded); + add(groupId, x, x, x, x, y, y); + } + + public void add(int groupId, int minNegX, int minPosX, int maxNegX, int maxPosX, int maxY, int minY) { + ensureCapacity(groupId); + if (hasValue(groupId)) { + minNegXs.set(groupId, Math.min(minNegXs.get(groupId), minNegX)); + minPosXs.set(groupId, SpatialAggregationUtils.minPos(minPosXs.get(groupId), minPosX)); + maxNegXs.set(groupId, SpatialAggregationUtils.maxNeg(maxNegXs.get(groupId), maxNegX)); + maxPosXs.set(groupId, Math.max(maxPosXs.get(groupId), maxPosX)); + maxYs.set(groupId, Math.max(maxYs.get(groupId), maxY)); + minYs.set(groupId, Math.min(minYs.get(groupId), minY)); + } else { + minNegXs.set(groupId, minNegX); + minPosXs.set(groupId, minPosX); + maxNegXs.set(groupId, maxNegX); + maxPosXs.set(groupId, maxPosX); + maxYs.set(groupId, maxY); + minYs.set(groupId, minY); + } + assert minNegX <= 0 == maxNegX <= 0 : "minNegX=" + minNegX + " maxNegX=" + maxNegX; + assert minPosX >= 0 == maxPosX >= 0 : "minPosX=" + minPosX + " maxPosX=" + maxPosX; + trackGroupId(groupId); + } + + private void ensureCapacity(int groupId) { + long requiredSize = groupId + 1; + if (minNegXs.size() < requiredSize) { + minNegXs = bigArrays.grow(minNegXs, requiredSize); + minPosXs = bigArrays.grow(minPosXs, requiredSize); + maxNegXs = bigArrays.grow(maxNegXs, requiredSize); + maxPosXs = bigArrays.grow(maxPosXs, requiredSize); + minYs = bigArrays.grow(minYs, requiredSize); + maxYs = bigArrays.grow(maxYs, requiredSize); + } + } + + public Block toBlock(IntVector selected, DriverContext driverContext) { + try (var builder = driverContext.blockFactory().newBytesRefBlockBuilder(selected.getPositionCount())) { + for (int i = 0; i < selected.getPositionCount(); i++) { + int si = selected.getInt(i); + if (hasValue(si)) { + builder.appendBytesRef( + new BytesRef( + WellKnownBinary.toWKB( + SpatialAggregationUtils.asRectangle( + minNegXs.get(si), + minPosXs.get(si), + maxNegXs.get(si), + maxPosXs.get(si), + maxYs.get(si), + minYs.get(si) + ), + ByteOrder.LITTLE_ENDIAN + ) + ) + ); + } else { + builder.appendNull(); + } + } + return builder.build(); + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentLongitudeWrappingAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentLongitudeWrappingAggregator.java new file mode 100644 index 0000000000000..80ba2d5e45658 --- /dev/null +++ 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentLongitudeWrappingAggregator.java @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation.spatial; + +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.operator.DriverContext; + +// A bit of abuse of notation here, since we're extending this class to "inherit" its static methods. +// Unfortunately, this is the way it has to be done, since the generated code invokes these methods statically. +abstract class SpatialExtentLongitudeWrappingAggregator { + public static void combineIntermediate( + SpatialExtentStateWrappedLongitudeState current, + int minNegX, + int minPosX, + int maxNegX, + int maxPosX, + int maxY, + int minY + ) { + current.add(minNegX, minPosX, maxNegX, maxPosX, maxY, minY); + } + + public static void combineIntermediate( + SpatialExtentGroupingStateWrappedLongitudeState current, + int groupId, + int minNegX, + int minPosX, + int maxNegX, + int maxPosX, + int maxY, + int minY + ) { + current.add(groupId, minNegX, minPosX, maxNegX, maxPosX, maxY, minY); + } + + public static Block evaluateFinal(SpatialExtentStateWrappedLongitudeState state, DriverContext driverContext) { + return state.toBlock(driverContext); + } + + public static Block evaluateFinal( + SpatialExtentGroupingStateWrappedLongitudeState state, + IntVector selected, + DriverContext driverContext + ) { + return state.toBlock(selected, driverContext); + } + + public static void combineStates( + SpatialExtentGroupingStateWrappedLongitudeState current, + int groupId, + SpatialExtentGroupingStateWrappedLongitudeState inState, + int inPosition + ) { + current.add(groupId, inState, inPosition); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentState.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentState.java new file mode 100644 index 0000000000000..0eea9b79f73ea --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentState.java @@ -0,0 +1,82 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.aggregation.spatial; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.aggregation.AggregatorState; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.geometry.Rectangle; +import org.elasticsearch.geometry.utils.WellKnownBinary; + +import java.nio.ByteOrder; + +final class SpatialExtentState implements AggregatorState { + private final PointType pointType; + private boolean seen = false; + private int minX = Integer.MAX_VALUE; + private int maxX = Integer.MIN_VALUE; + private int maxY = Integer.MIN_VALUE; + private int minY = Integer.MAX_VALUE; + + SpatialExtentState(PointType pointType) { + this.pointType = pointType; + } + + @Override + public void close() {} + + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + assert blocks.length >= offset + 4; + var blockFactory = driverContext.blockFactory(); + blocks[offset + 0] = blockFactory.newConstantIntBlockWith(minX, 1); + blocks[offset + 1] = blockFactory.newConstantIntBlockWith(maxX, 1); + blocks[offset + 2] = blockFactory.newConstantIntBlockWith(maxY, 1); + blocks[offset + 3] = blockFactory.newConstantIntBlockWith(minY, 1); + } + + public void add(Geometry geo) { + pointType.computeEnvelope(geo) + .ifPresent( + r -> add( + pointType.encodeX(r.getMinX()), + pointType.encodeX(r.getMaxX()), + pointType.encodeY(r.getMaxY()), + pointType.encodeY(r.getMinY()) + ) + ); + } + + public void add(int minX, int maxX, int maxY, int minY) { + seen = true; + this.minX = Math.min(this.minX, minX); + this.maxX = Math.max(this.maxX, maxX); + this.maxY = Math.max(this.maxY, maxY); + this.minY = Math.min(this.minY, minY); + } + + public void add(long encoded) { + int x = pointType.extractX(encoded); + int y = pointType.extractY(encoded); + add(x, x, y, y); + } + + public Block toBlock(DriverContext driverContext) { + var factory = driverContext.blockFactory(); + return seen ? factory.newConstantBytesRefBlockWith(new BytesRef(toWKB()), 1) : factory.newConstantNullBlock(1); + } + + private byte[] toWKB() { + return WellKnownBinary.toWKB( + new Rectangle(pointType.decodeX(minX), pointType.decodeX(maxX), pointType.decodeY(maxY), pointType.decodeY(minY)), + ByteOrder.LITTLE_ENDIAN + ); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentStateWrappedLongitudeState.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentStateWrappedLongitudeState.java new file mode 100644 index 0000000000000..99200d2ed99f5 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentStateWrappedLongitudeState.java @@ -0,0 +1,91 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
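Reviewer note: the non-grouping state above is simpler: plain int fields primed with sentinels, plus a seen flag so an aggregator that never received a value can emit a null block instead of a rectangle built from the sentinels. In isolation:

```java
// Standalone sketch of SpatialExtentState's accumulation. extractX/extractY
// stand in for PointType.extractX/extractY (the real bit layout is elided).
static int extractX(long encoded) { return (int) (encoded >>> 32); } // placeholder layout
static int extractY(long encoded) { return (int) encoded; }          // placeholder layout

static void accumulate(long[] encodedPoints) {
    int minX = Integer.MAX_VALUE, maxX = Integer.MIN_VALUE;
    int maxY = Integer.MIN_VALUE, minY = Integer.MAX_VALUE;
    boolean seen = false;
    for (long encoded : encodedPoints) {
        seen = true;
        minX = Math.min(minX, extractX(encoded));
        maxX = Math.max(maxX, extractX(encoded));
        maxY = Math.max(maxY, extractY(encoded));
        minY = Math.min(minY, extractY(encoded));
    }
    // toBlock emits a WKB rectangle when seen, else a constant null block, so
    // "no input" stays distinguishable from an extent built out of sentinels.
}
```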
+ */ + +package org.elasticsearch.compute.aggregation.spatial; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.aggregation.AggregatorState; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor; +import org.elasticsearch.geometry.utils.WellKnownBinary; + +import java.nio.ByteOrder; + +final class SpatialExtentStateWrappedLongitudeState implements AggregatorState { + // Only geo points support longitude wrapping. + private static final PointType POINT_TYPE = PointType.GEO; + private boolean seen = false; + private int minNegX = SpatialAggregationUtils.DEFAULT_NEG; + private int minPosX = SpatialAggregationUtils.DEFAULT_POS; + private int maxNegX = SpatialAggregationUtils.DEFAULT_NEG; + private int maxPosX = SpatialAggregationUtils.DEFAULT_POS; + private int maxY = Integer.MIN_VALUE; + private int minY = Integer.MAX_VALUE; + + private GeoPointEnvelopeVisitor geoPointVisitor = new GeoPointEnvelopeVisitor(); + + @Override + public void close() {} + + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + assert blocks.length >= offset + 6; + var blockFactory = driverContext.blockFactory(); + blocks[offset + 0] = blockFactory.newConstantIntBlockWith(minNegX, 1); + blocks[offset + 1] = blockFactory.newConstantIntBlockWith(minPosX, 1); + blocks[offset + 2] = blockFactory.newConstantIntBlockWith(maxNegX, 1); + blocks[offset + 3] = blockFactory.newConstantIntBlockWith(maxPosX, 1); + blocks[offset + 4] = blockFactory.newConstantIntBlockWith(maxY, 1); + blocks[offset + 5] = blockFactory.newConstantIntBlockWith(minY, 1); + } + + public void add(Geometry geo) { + geoPointVisitor.reset(); + if (geo.visit(new SpatialEnvelopeVisitor(geoPointVisitor))) { + add( + SpatialAggregationUtils.encodeNegativeLongitude(geoPointVisitor.getMinNegX()), + SpatialAggregationUtils.encodePositiveLongitude(geoPointVisitor.getMinPosX()), + SpatialAggregationUtils.encodeNegativeLongitude(geoPointVisitor.getMaxNegX()), + SpatialAggregationUtils.encodePositiveLongitude(geoPointVisitor.getMaxPosX()), + POINT_TYPE.encodeY(geoPointVisitor.getMaxY()), + POINT_TYPE.encodeY(geoPointVisitor.getMinY()) + ); + } + } + + public void add(int minNegX, int minPosX, int maxNegX, int maxPosX, int maxY, int minY) { + seen = true; + this.minNegX = Math.min(this.minNegX, minNegX); + this.minPosX = SpatialAggregationUtils.minPos(this.minPosX, minPosX); + this.maxNegX = SpatialAggregationUtils.maxNeg(this.maxNegX, maxNegX); + this.maxPosX = Math.max(this.maxPosX, maxPosX); + this.maxY = Math.max(this.maxY, maxY); + this.minY = Math.min(this.minY, minY); + assert this.minNegX <= 0 == this.maxNegX <= 0 : "minNegX=" + this.minNegX + " maxNegX=" + this.maxNegX; + assert this.minPosX >= 0 == this.maxPosX >= 0 : "minPosX=" + this.minPosX + " maxPosX=" + this.maxPosX; + } + + public void add(long encoded) { + int x = POINT_TYPE.extractX(encoded); + int y = POINT_TYPE.extractY(encoded); + add(x, x, x, x, y, y); + } + + public Block toBlock(DriverContext driverContext) { + var factory = driverContext.blockFactory(); + return seen ? 
factory.newConstantBytesRefBlockWith(new BytesRef(toWKB()), 1) : factory.newConstantNullBlock(1); + } + + private byte[] toWKB() { + return WellKnownBinary.toWKB( + SpatialAggregationUtils.asRectangle(minNegX, minPosX, maxNegX, maxPosX, maxY, minY), + ByteOrder.LITTLE_ENDIAN + ); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/HashAggregationOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/HashAggregationOperator.java index 6f8386ec08de1..ccddfdf5cc74a 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/HashAggregationOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/HashAggregationOperator.java @@ -51,7 +51,13 @@ public Operator get(DriverContext driverContext) { if (groups.stream().anyMatch(BlockHash.GroupSpec::isCategorize)) { return new HashAggregationOperator( aggregators, - () -> BlockHash.buildCategorizeBlockHash(groups, aggregatorMode, driverContext.blockFactory(), analysisRegistry), + () -> BlockHash.buildCategorizeBlockHash( + groups, + aggregatorMode, + driverContext.blockFactory(), + analysisRegistry, + maxPageSize + ), driverContext ); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java index f8428b7c33568..587deda650a23 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java @@ -130,9 +130,6 @@ public void close() { } finally { page.releaseBlocks(); } - - // TODO: randomize values? May give wrong results - // TODO: assert the categorizer state after adding pages. } public void testCategorizeRawMultivalue() { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizePackedValuesBlockHashTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizePackedValuesBlockHashTests.java new file mode 100644 index 0000000000000..cfa023af3d18a --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizePackedValuesBlockHashTests.java @@ -0,0 +1,248 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.aggregation.blockhash; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.analysis.common.CommonAnalysisPlugin; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.collect.Iterators; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.compute.aggregation.AggregatorMode; +import org.elasticsearch.compute.aggregation.ValuesBytesRefAggregatorFunctionSupplier; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockUtils; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.CannedSourceOperator; +import org.elasticsearch.compute.operator.Driver; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.HashAggregationOperator; +import org.elasticsearch.compute.operator.LocalSourceOperator; +import org.elasticsearch.compute.operator.PageConsumerOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.env.Environment; +import org.elasticsearch.env.TestEnvironment; +import org.elasticsearch.index.analysis.AnalysisRegistry; +import org.elasticsearch.indices.analysis.AnalysisModule; +import org.elasticsearch.plugins.scanners.StablePluginsRegistry; +import org.elasticsearch.xpack.ml.MachineLearning; +import org.junit.Before; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.compute.operator.OperatorTestCase.runDriver; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; + +public class CategorizePackedValuesBlockHashTests extends BlockHashTestCase { + + private AnalysisRegistry analysisRegistry; + + @Before + private void initAnalysisRegistry() throws IOException { + analysisRegistry = new AnalysisModule( + TestEnvironment.newEnvironment( + Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build() + ), + List.of(new MachineLearning(Settings.EMPTY), new CommonAnalysisPlugin()), + new StablePluginsRegistry() + ).getAnalysisRegistry(); + } + + public void testCategorize_withDriver() { + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofMb(256)).withCircuitBreaking(); + CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); + DriverContext driverContext = new DriverContext(bigArrays, new BlockFactory(breaker, bigArrays)); + boolean withNull = randomBoolean(); + boolean withMultivalues = randomBoolean(); + + List groupSpecs = List.of( + new BlockHash.GroupSpec(0, ElementType.BYTES_REF, true), + new BlockHash.GroupSpec(1, ElementType.INT, false) + ); + + LocalSourceOperator.BlockSupplier input1 = () -> { + try ( + BytesRefBlock.Builder messagesBuilder = driverContext.blockFactory().newBytesRefBlockBuilder(10); + IntBlock.Builder idsBuilder = driverContext.blockFactory().newIntBlockBuilder(10) + ) { + if 
(withMultivalues) { + messagesBuilder.beginPositionEntry(); + } + messagesBuilder.appendBytesRef(new BytesRef("connected to 1.1.1")); + messagesBuilder.appendBytesRef(new BytesRef("connected to 1.1.2")); + if (withMultivalues) { + messagesBuilder.endPositionEntry(); + } + idsBuilder.appendInt(7); + if (withMultivalues == false) { + idsBuilder.appendInt(7); + } + + messagesBuilder.appendBytesRef(new BytesRef("connected to 1.1.3")); + messagesBuilder.appendBytesRef(new BytesRef("connection error")); + messagesBuilder.appendBytesRef(new BytesRef("connection error")); + messagesBuilder.appendBytesRef(new BytesRef("connected to 1.1.4")); + idsBuilder.appendInt(42); + idsBuilder.appendInt(7); + idsBuilder.appendInt(42); + idsBuilder.appendInt(7); + + if (withNull) { + messagesBuilder.appendNull(); + idsBuilder.appendInt(43); + } + return new Block[] { messagesBuilder.build(), idsBuilder.build() }; + } + }; + LocalSourceOperator.BlockSupplier input2 = () -> { + try ( + BytesRefBlock.Builder messagesBuilder = driverContext.blockFactory().newBytesRefBlockBuilder(10); + IntBlock.Builder idsBuilder = driverContext.blockFactory().newIntBlockBuilder(10) + ) { + messagesBuilder.appendBytesRef(new BytesRef("connected to 2.1.1")); + messagesBuilder.appendBytesRef(new BytesRef("connected to 2.1.2")); + messagesBuilder.appendBytesRef(new BytesRef("disconnected")); + messagesBuilder.appendBytesRef(new BytesRef("connection error")); + idsBuilder.appendInt(111); + idsBuilder.appendInt(7); + idsBuilder.appendInt(7); + idsBuilder.appendInt(42); + if (withNull) { + messagesBuilder.appendNull(); + idsBuilder.appendNull(); + } + return new Block[] { messagesBuilder.build(), idsBuilder.build() }; + } + }; + + List intermediateOutput = new ArrayList<>(); + + Driver driver = new Driver( + driverContext, + new LocalSourceOperator(input1), + List.of( + new HashAggregationOperator.HashAggregationOperatorFactory( + groupSpecs, + AggregatorMode.INITIAL, + List.of(new ValuesBytesRefAggregatorFunctionSupplier(List.of(0)).groupingAggregatorFactory(AggregatorMode.INITIAL)), + 16 * 1024, + analysisRegistry + ).get(driverContext) + ), + new PageConsumerOperator(intermediateOutput::add), + () -> {} + ); + runDriver(driver); + + driver = new Driver( + driverContext, + new LocalSourceOperator(input2), + List.of( + new HashAggregationOperator.HashAggregationOperatorFactory( + groupSpecs, + AggregatorMode.INITIAL, + List.of(new ValuesBytesRefAggregatorFunctionSupplier(List.of(0)).groupingAggregatorFactory(AggregatorMode.INITIAL)), + 16 * 1024, + analysisRegistry + ).get(driverContext) + ), + new PageConsumerOperator(intermediateOutput::add), + () -> {} + ); + runDriver(driver); + + List finalOutput = new ArrayList<>(); + + driver = new Driver( + driverContext, + new CannedSourceOperator(intermediateOutput.iterator()), + List.of( + new HashAggregationOperator.HashAggregationOperatorFactory( + groupSpecs, + AggregatorMode.FINAL, + List.of(new ValuesBytesRefAggregatorFunctionSupplier(List.of(2)).groupingAggregatorFactory(AggregatorMode.FINAL)), + 16 * 1024, + analysisRegistry + ).get(driverContext) + ), + new PageConsumerOperator(finalOutput::add), + () -> {} + ); + runDriver(driver); + + assertThat(finalOutput, hasSize(1)); + assertThat(finalOutput.get(0).getBlockCount(), equalTo(3)); + BytesRefBlock outputMessages = finalOutput.get(0).getBlock(0); + IntBlock outputIds = finalOutput.get(0).getBlock(1); + BytesRefBlock outputValues = finalOutput.get(0).getBlock(2); + assertThat(outputIds.getPositionCount(), 
equalTo(outputMessages.getPositionCount())); + assertThat(outputValues.getPositionCount(), equalTo(outputMessages.getPositionCount())); + Map>> result = new HashMap<>(); + for (int i = 0; i < outputMessages.getPositionCount(); i++) { + BytesRef messageBytesRef = ((BytesRef) BlockUtils.toJavaObject(outputMessages, i)); + String message = messageBytesRef == null ? null : messageBytesRef.utf8ToString(); + result.computeIfAbsent(message, key -> new HashMap<>()); + + Integer id = (Integer) BlockUtils.toJavaObject(outputIds, i); + result.get(message).computeIfAbsent(id, key -> new HashSet<>()); + + Object values = BlockUtils.toJavaObject(outputValues, i); + if (values == null) { + result.get(message).get(id).add(null); + } else { + if ((values instanceof List) == false) { + values = List.of(values); + } + for (Object valueObject : (List) values) { + BytesRef value = (BytesRef) valueObject; + result.get(message).get(id).add(value.utf8ToString()); + } + } + } + Releasables.close(() -> Iterators.map(finalOutput.iterator(), (Page p) -> p::releaseBlocks)); + + Map>> expectedResult = Map.of( + ".*?connected.+?to.*?", + Map.of( + 7, + Set.of("connected to 1.1.1", "connected to 1.1.2", "connected to 1.1.4", "connected to 2.1.2"), + 42, + Set.of("connected to 1.1.3"), + 111, + Set.of("connected to 2.1.1") + ), + ".*?connection.+?error.*?", + Map.of(7, Set.of("connection error"), 42, Set.of("connection error")), + ".*?disconnected.*?", + Map.of(7, Set.of("disconnected")) + ); + if (withNull) { + expectedResult = new HashMap<>(expectedResult); + expectedResult.put(null, new HashMap<>()); + expectedResult.get(null).put(null, new HashSet<>()); + expectedResult.get(null).get(null).add(null); + expectedResult.get(null).put(43, new HashSet<>()); + expectedResult.get(null).get(43).add(null); + } + assertThat(result, equalTo(expectedResult)); + } +} diff --git a/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle b/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle index eac5d5764d4b2..971eed1c6795d 100644 --- a/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle +++ b/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle @@ -27,6 +27,7 @@ restResources { dependencies { javaRestTestImplementation project(xpackModule('esql:qa:testFixtures')) javaRestTestImplementation project(xpackModule('esql:qa:server')) + javaRestTestImplementation project(xpackModule('esql')) } GradleUtils.extendSourceSet(project, "javaRestTest", "yamlRestTest") diff --git a/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java b/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java index 801e1d12b1d4a..1120a69cc5166 100644 --- a/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java +++ b/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java @@ -18,8 +18,10 @@ import org.junit.ClassRule; import java.io.IOException; +import java.util.List; import static org.elasticsearch.xpack.esql.CsvTestUtils.isEnabled; +import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_LOOKUP_V5; import static org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase.Mode.ASYNC; public class MixedClusterEsqlSpecIT extends EsqlSpecTestCase { @@ -92,6 +94,11 @@ protected boolean supportsInferenceTestService() { return false; } + @Override 
+ protected boolean supportsIndexModeLookup() throws IOException { + return hasCapabilities(List.of(JOIN_LOOKUP_V5.capabilityName())); + } + @Override protected boolean deduplicateExactWarnings() { /* diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java index e658d169cbce8..5c7f981c93a97 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java @@ -48,7 +48,7 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.classpathResources; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.INLINESTATS; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.INLINESTATS_V2; -import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_LOOKUP_V4; +import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_LOOKUP_V5; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_PLANNING_V1; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.METADATA_FIELDS_REMOTE_TEST; import static org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase.Mode.SYNC; @@ -124,7 +124,7 @@ protected void shouldSkipTest(String testName) throws IOException { assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(INLINESTATS.capabilityName())); assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(INLINESTATS_V2.capabilityName())); assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(JOIN_PLANNING_V1.capabilityName())); - assumeFalse("LOOKUP JOIN not yet supported in CCS", testCase.requiredCapabilities.contains(JOIN_LOOKUP_V4.capabilityName())); + assumeFalse("LOOKUP JOIN not yet supported in CCS", testCase.requiredCapabilities.contains(JOIN_LOOKUP_V5.capabilityName())); } private TestFeatureService remoteFeaturesService() throws IOException { @@ -280,4 +280,11 @@ protected boolean enableRoundingDoubleValuesOnAsserting() { protected boolean supportsInferenceTestService() { return false; } + + @Override + protected boolean supportsIndexModeLookup() throws IOException { + // CCS does not yet support JOIN_LOOKUP_V5 and clusters falsely report they have this capability + // return hasCapabilities(List.of(JOIN_LOOKUP_V5.capabilityName())); + return false; + } } diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java index 2484a428c4b03..e7bce73e21cdd 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java @@ -135,8 +135,8 @@ public void setup() throws IOException { createInferenceEndpoint(client()); } - if (indexExists(availableDatasetsForEs(client()).iterator().next().indexName()) == false) { - loadDataSetIntoEs(client()); + if (indexExists(availableDatasetsForEs(client(), supportsIndexModeLookup()).iterator().next().indexName()) == false) { + loadDataSetIntoEs(client(), 
supportsIndexModeLookup()); + } } @@ -182,12 +182,30 @@ protected void shouldSkipTest(String testName) throws IOException { protected static void checkCapabilities(RestClient client, TestFeatureService testFeatureService, String testName, CsvTestCase testCase) throws IOException { - if (testCase.requiredCapabilities.isEmpty()) { + if (hasCapabilities(client, testCase.requiredCapabilities)) { return; } + + var features = new EsqlFeatures().getFeatures().stream().map(NodeFeature::id).collect(Collectors.toSet()); + + for (String feature : testCase.requiredCapabilities) { + var esqlFeature = "esql." + feature; + assumeTrue("Requested capability " + feature + " is an ESQL cluster feature", features.contains(esqlFeature)); + assumeTrue("Test " + testName + " requires " + feature, testFeatureService.clusterHasFeature(esqlFeature)); + } + } + + protected static boolean hasCapabilities(List<String> requiredCapabilities) throws IOException { + return hasCapabilities(adminClient(), requiredCapabilities); + } + + protected static boolean hasCapabilities(RestClient client, List<String> requiredCapabilities) throws IOException { + if (requiredCapabilities.isEmpty()) { + return true; + } try { - if (clusterHasCapability(client, "POST", "/_query", List.of(), testCase.requiredCapabilities).orElse(false)) { - return; + if (clusterHasCapability(client, "POST", "/_query", List.of(), requiredCapabilities).orElse(false)) { + return true; } LOGGER.info("capabilities API returned false, we might be in a mixed version cluster so falling back to cluster features"); } catch (ResponseException e) { @@ -206,20 +224,17 @@ protected static void checkCapabilities(RestClient client, TestFeatureService te throw e; } } - - var features = new EsqlFeatures().getFeatures().stream().map(NodeFeature::id).collect(Collectors.toSet()); - - for (String feature : testCase.requiredCapabilities) { - var esqlFeature = "esql."
+ feature; - assumeTrue("Requested capability " + feature + " is an ESQL cluster feature", features.contains(esqlFeature)); - assumeTrue("Test " + testName + " requires " + feature, testFeatureService.clusterHasFeature(esqlFeature)); - } + return false; } protected boolean supportsInferenceTestService() { return true; } + protected boolean supportsIndexModeLookup() throws IOException { + return true; + } + protected final void doTest() throws Throwable { RequestObjectBuilder builder = new RequestObjectBuilder(randomFrom(XContentType.values())); diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java index 63c184e973cde..588d5870d89ec 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java @@ -46,7 +46,7 @@ public abstract class GenerativeRestTest extends ESRestTestCase { @Before public void setup() throws IOException { if (indexExists(CSV_DATASET_MAP.keySet().iterator().next()) == false) { - loadDataSetIntoEs(client()); + loadDataSetIntoEs(client(), true); } } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java index 34af1edb9f99b..3b656ded94dd7 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java @@ -63,6 +63,8 @@ public class CsvTestsDataLoader { private static final TestsDataset LANGUAGES = new TestsDataset("languages"); private static final TestsDataset LANGUAGES_LOOKUP = LANGUAGES.withIndex("languages_lookup") .withSetting("languages_lookup-settings.json"); + private static final TestsDataset LANGUAGES_LOOKUP_NON_UNIQUE_KEY = LANGUAGES_LOOKUP.withIndex("languages_lookup_non_unique_key") + .withData("languages_non_unique_key.csv"); private static final TestsDataset ALERTS = new TestsDataset("alerts"); private static final TestsDataset UL_LOGS = new TestsDataset("ul_logs"); private static final TestsDataset SAMPLE_DATA = new TestsDataset("sample_data"); @@ -114,6 +116,7 @@ public class CsvTestsDataLoader { Map.entry(APPS_SHORT.indexName, APPS_SHORT), Map.entry(LANGUAGES.indexName, LANGUAGES), Map.entry(LANGUAGES_LOOKUP.indexName, LANGUAGES_LOOKUP), + Map.entry(LANGUAGES_LOOKUP_NON_UNIQUE_KEY.indexName, LANGUAGES_LOOKUP_NON_UNIQUE_KEY), Map.entry(UL_LOGS.indexName, UL_LOGS), Map.entry(SAMPLE_DATA.indexName, SAMPLE_DATA), Map.entry(MV_SAMPLE_DATA.indexName, MV_SAMPLE_DATA), @@ -235,7 +238,7 @@ public static void main(String[] args) throws IOException { } try (RestClient client = builder.build()) { - loadDataSetIntoEs(client, (restClient, indexName, indexMapping, indexSettings) -> { + loadDataSetIntoEs(client, true, (restClient, indexName, indexMapping, indexSettings) -> { // don't use ESRestTestCase methods here or, if you do, test running the main method before making the change StringBuilder jsonBody = new StringBuilder("{"); if (indexSettings != null && indexSettings.isEmpty() == false) { @@ -254,26 +257,28 @@ public static void main(String[] args) throws IOException { } } - 
public static Set availableDatasetsForEs(RestClient client) throws IOException { + public static Set availableDatasetsForEs(RestClient client, boolean supportsIndexModeLookup) throws IOException { boolean inferenceEnabled = clusterHasInferenceEndpoint(client); return CSV_DATASET_MAP.values() .stream() .filter(d -> d.requiresInferenceEndpoint == false || inferenceEnabled) + .filter(d -> supportsIndexModeLookup || d.indexName.endsWith("_lookup") == false) // TODO: use actual index settings .collect(Collectors.toCollection(HashSet::new)); } - public static void loadDataSetIntoEs(RestClient client) throws IOException { - loadDataSetIntoEs(client, (restClient, indexName, indexMapping, indexSettings) -> { + public static void loadDataSetIntoEs(RestClient client, boolean supportsIndexModeLookup) throws IOException { + loadDataSetIntoEs(client, supportsIndexModeLookup, (restClient, indexName, indexMapping, indexSettings) -> { ESRestTestCase.createIndex(restClient, indexName, indexSettings, indexMapping, null); }); } - private static void loadDataSetIntoEs(RestClient client, IndexCreator indexCreator) throws IOException { + private static void loadDataSetIntoEs(RestClient client, boolean supportsIndexModeLookup, IndexCreator indexCreator) + throws IOException { Logger logger = LogManager.getLogger(CsvTestsDataLoader.class); Set loadedDatasets = new HashSet<>(); - for (var dataset : availableDatasetsForEs(client)) { + for (var dataset : availableDatasetsForEs(client, supportsIndexModeLookup)) { load(client, dataset, logger, indexCreator); loadedDatasets.add(dataset.indexName); } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/categorize.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/categorize.csv-spec index 4ce43961a7077..5ad62dd7a21a8 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/categorize.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/categorize.csv-spec @@ -60,6 +60,19 @@ COUNT():long | VALUES(str):keyword | category:keyword 1 | [a, b, c] | .*?disconnected.*? ; +limit before stats +required_capability: categorize_v5 + +FROM sample_data | SORT message | LIMIT 4 + | STATS count=COUNT() BY category=CATEGORIZE(message) + | SORT category +; + +count:long | category:keyword + 3 | .*?Connected.+?to.*? + 1 | .*?Connection.+?error.*? +; + skips stopwords required_capability: categorize_v5 @@ -615,3 +628,159 @@ COUNT():long | x:keyword 3 | [.*?Connection.+?error.*?,.*?Connection.+?error.*?] 1 | [.*?Disconnected.*?,.*?Disconnected.*?] ; + +multiple groupings with categorize and ip +required_capability: categorize_multiple_groupings + +FROM sample_data + | STATS count=COUNT() BY category=CATEGORIZE(message), client_ip + | SORT category, client_ip +; + +count:long | category:keyword | client_ip:ip + 1 | .*?Connected.+?to.*? | 172.21.2.113 + 1 | .*?Connected.+?to.*? | 172.21.2.162 + 1 | .*?Connected.+?to.*? | 172.21.3.15 + 3 | .*?Connection.+?error.*? | 172.21.3.15 + 1 | .*?Disconnected.*? | 172.21.0.5 +; + +multiple groupings with categorize and bucketed timestamp +required_capability: categorize_multiple_groupings + +FROM sample_data + | STATS count=COUNT() BY category=CATEGORIZE(message), timestamp=BUCKET(@timestamp, 1 HOUR) + | SORT category, timestamp +; + +count:long | category:keyword | timestamp:datetime + 2 | .*?Connected.+?to.*? | 2023-10-23T12:00:00.000Z + 1 | .*?Connected.+?to.*? | 2023-10-23T13:00:00.000Z + 3 | .*?Connection.+?error.*? | 2023-10-23T13:00:00.000Z + 1 | .*?Disconnected.*? 
| 2023-10-23T13:00:00.000Z +; + + +multiple groupings with categorize and limit before stats +required_capability: categorize_multiple_groupings + +FROM sample_data | SORT message | LIMIT 5 + | STATS count=COUNT() BY category=CATEGORIZE(message), client_ip + | SORT category, client_ip +; + +count:long | category:keyword | client_ip:ip + 1 | .*?Connected.+?to.*? | 172.21.2.113 + 1 | .*?Connected.+?to.*? | 172.21.2.162 + 1 | .*?Connected.+?to.*? | 172.21.3.15 + 2 | .*?Connection.+?error.*? | 172.21.3.15 +; + +multiple groupings with categorize and nulls +required_capability: categorize_multiple_groupings + +FROM employees + | STATS SUM(languages) BY category=CATEGORIZE(job_positions), gender + | SORT category DESC, gender ASC + | LIMIT 5 +; + +SUM(languages):long | category:keyword | gender:keyword + 11 | null | F + 16 | null | M + 14 | .*?Tech.+?Lead.*? | F + 23 | .*?Tech.+?Lead.*? | M + 9 | .*?Tech.+?Lead.*? | null +; + +multiple groupings with categorize and a field that's always null +required_capability: categorize_multiple_groupings + +FROM sample_data + | EVAL nullfield = null + | STATS count=COUNT() BY category=CATEGORIZE(nullfield), client_ip + | SORT client_ip +; + +count:long | category:keyword | client_ip:ip + 1 | null | 172.21.0.5 + 1 | null | 172.21.2.113 + 1 | null | 172.21.2.162 + 4 | null | 172.21.3.15 +; + +multiple groupings with categorize and the same text field +required_capability: categorize_multiple_groupings + +FROM sample_data + | STATS count=COUNT() BY category=CATEGORIZE(message), message + | SORT message +; + +count:long | category:keyword | message:keyword + 1 | .*?Connected.+?to.*? | Connected to 10.1.0.1 + 1 | .*?Connected.+?to.*? | Connected to 10.1.0.2 + 1 | .*?Connected.+?to.*? | Connected to 10.1.0.3 + 3 | .*?Connection.+?error.*? | Connection error + 1 | .*?Disconnected.*? | Disconnected +; + +multiple additional complex groupings with categorize +required_capability: categorize_multiple_groupings + +FROM sample_data + | STATS count=COUNT(), duration=SUM(event_duration) BY category=CATEGORIZE(message), SUBSTRING(message, 1, 7), ip_part=TO_LONG(SUBSTRING(TO_STRING(client_ip), 8, 1)), hour=BUCKET(@timestamp, 1 HOUR) + | SORT ip_part, category +; + +count:long | duration:long | category:keyword | SUBSTRING(message, 1, 7):keyword | ip_part:long | hour:datetime + 1 | 1232382 | .*?Disconnected.*? | Disconn | 0 | 2023-10-23T13:00:00.000Z + 2 | 6215122 | .*?Connected.+?to.*? | Connect | 2 | 2023-10-23T12:00:00.000Z + 1 | 1756467 | .*?Connected.+?to.*? | Connect | 3 | 2023-10-23T13:00:00.000Z + 3 | 14027356 | .*?Connection.+?error.*? | Connect | 3 | 2023-10-23T13:00:00.000Z +; + +multiple groupings with categorize and some constants including null +required_capability: categorize_multiple_groupings + +FROM sample_data + | STATS count=MV_COUNT(VALUES(message)) BY category=CATEGORIZE(message), null, constant="constant" + | SORT category +; + +count:integer | category:keyword | null:null | constant:keyword + 3 | .*?Connected.+?to.*? | null | constant + 1 | .*?Connection.+?error.*? | null | constant + 1 | .*?Disconnected.*? | null | constant +; + +multiple groupings with categorize and aggregation filters +required_capability: categorize_multiple_groupings + +FROM employees + | STATS lang_low=AVG(languages) WHERE salary<=50000, lang_high=AVG(languages) WHERE salary>50000 BY category=CATEGORIZE(job_positions), gender + | SORT category, gender + | LIMIT 5 +; + +lang_low:double | lang_high:double | category:keyword | gender:keyword + 2.0 | 5.0 | .*?Accountant.*? 
| F + 3.0 | 2.5 | .*?Accountant.*? | M + 5.0 | 2.0 | .*?Accountant.*? | null + 3.0 | 3.25 | .*?Architect.*? | F + 3.75 | null | .*?Architect.*? | M +; + +multiple groupings with categorize on null row +required_capability: categorize_multiple_groupings + +ROW message = null, str = ["a", "b", "c"] + | STATS COUNT(), VALUES(str) BY category=CATEGORIZE(message), str + | SORT str +; + +COUNT():long | VALUES(str):keyword | category:keyword | str:keyword + 1 | [a, b, c] | null | a + 1 | [a, b, c] | null | b + 1 | [a, b, c] | null | c +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec index 66b035b18c07c..1ca77db9f7586 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec @@ -629,6 +629,80 @@ yr:date_nanos | mo:date_nanos | mn:date_nanos 2023-01-01T00:00:00.000000000Z | 2023-10-01T00:00:00.000000000Z | 2023-10-23T12:10:00.000000000Z | 2023-10-23T12:15:03.360000000Z ; +Bucket Date nanos by Year +required_capability: date_trunc_date_nanos +required_capability: date_nanos_bucket + +FROM date_nanos +| WHERE millis > "2020-01-01" +| STATS ct = count(*) BY yr = BUCKET(nanos, 1 year); + +ct:long | yr:date_nanos +8 | 2023-01-01T00:00:00.000000000Z +; + +Bucket Date nanos by Year, range version +required_capability: date_trunc_date_nanos +required_capability: date_nanos_bucket + +FROM date_nanos +| WHERE millis > "2020-01-01" +| STATS ct = count(*) BY yr = BUCKET(nanos, 5, "1999-01-01", NOW()); + +ct:long | yr:date_nanos +8 | 2023-01-01T00:00:00.000000000Z +; + +Bucket Date nanos by Month +required_capability: date_trunc_date_nanos +required_capability: date_nanos_bucket + +FROM date_nanos +| WHERE millis > "2020-01-01" +| STATS ct = count(*) BY mo = BUCKET(nanos, 1 month); + +ct:long | mo:date_nanos +8 | 2023-10-01T00:00:00.000000000Z +; + +Bucket Date nanos by Month, range version +required_capability: date_trunc_date_nanos +required_capability: date_nanos_bucket + +FROM date_nanos +| WHERE millis > "2020-01-01" +| STATS ct = count(*) BY mo = BUCKET(nanos, 20, "2023-01-01", "2023-12-31"); + +ct:long | mo:date_nanos +8 | 2023-10-01T00:00:00.000000000Z +; + +Bucket Date nanos by Week, range version +required_capability: date_trunc_date_nanos +required_capability: date_nanos_bucket + +FROM date_nanos +| WHERE millis > "2020-01-01" +| STATS ct = count(*) BY mo = BUCKET(nanos, 55, "2023-01-01", "2023-12-31"); + +ct:long | mo:date_nanos +8 | 2023-10-23T00:00:00.000000000Z +; +Bucket Date nanos by 10 minutes +required_capability: date_trunc_date_nanos +required_capability: date_nanos_bucket + +FROM date_nanos +| WHERE millis > "2020-01-01" +| STATS ct = count(*) BY mn = BUCKET(nanos, 10 minutes); + +ct:long | mn:date_nanos +4 | 2023-10-23T13:50:00.000000000Z +1 | 2023-10-23T13:30:00.000000000Z +1 | 2023-10-23T12:20:00.000000000Z +2 | 2023-10-23T12:10:00.000000000Z +; + Add date nanos required_capability: date_nanos_add_subtract diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/dissect.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/dissect.csv-spec index 38f09d2e3c56e..cde5427bf37d6 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/dissect.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/dissect.csv-spec @@ -223,7 +223,8 @@ null | null | null ; -overwriteName +// the query is incorrectly physically planned (fails verification) in pre-8.13.0 versions +overwriteName#[skip:-8.12.99] from employees | sort emp_no asc | eval full_name = concat(first_name, " ", last_name) | dissect full_name "%{emp_no} %{b}" | keep full_name, emp_no, b | limit 3; full_name:keyword | emp_no:keyword | b:keyword @@ -244,7 +245,8 @@ emp_no:integer | first_name:keyword | rest:keyword ; -overwriteNameWhere +// the query is incorrectly physically planned (fails verification) in pre-8.13.0 versions +overwriteNameWhere#[skip:-8.12.99] from employees | sort emp_no asc | eval full_name = concat(first_name, " ", last_name) | dissect full_name "%{emp_no} %{b}" | where emp_no == "Bezalel" | keep full_name, emp_no, b | limit 3; full_name:keyword | emp_no:keyword | b:keyword diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/grok.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/grok.csv-spec index 98c88d06caa75..eece1bdfbffa4 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/grok.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/grok.csv-spec @@ -199,7 +199,8 @@ null | null | null ; -overwriteName +// the query is incorrectly physically planned (fails verification) in pre-8.13.0 versions +overwriteName#[skip:-8.12.99] from employees | sort emp_no asc | eval full_name = concat(first_name, " ", last_name) | grok full_name "%{WORD:emp_no} %{WORD:b}" | keep full_name, emp_no, b | limit 3; full_name:keyword | emp_no:keyword | b:keyword @@ -209,7 +210,8 @@ Parto Bamford | Parto | Bamford ; -overwriteNameWhere +// the query is incorrectly physically planned (fails verification) in pre-8.13.0 versions +overwriteNameWhere#[skip:-8.12.99] from employees | sort emp_no asc | eval full_name = concat(first_name, " ", last_name) | grok full_name "%{WORD:emp_no} %{WORD:b}" | where emp_no == "Bezalel" | keep full_name, emp_no, b | limit 3; full_name:keyword | emp_no:keyword | b:keyword diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/languages_non_unique_key.csv b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/languages_non_unique_key.csv new file mode 100644 index 0000000000000..1578762f8d1cb --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/languages_non_unique_key.csv @@ -0,0 +1,10 @@ +language_code:integer,language_name:keyword,country:keyword +1,English,Canada +1,English, +1,,United Kingdom +1,English,United States of America +2,German,[Germany,Austria] +2,German,Switzerland +2,German, +4,Quenya, +5,,Atlantis diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec index b01e12fa4f470..f6704d33934af 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec @@ -3,9 +3,8 @@ // Reuses the sample dataset and commands from enrich.csv-spec // -//TODO: this sometimes returns null instead of the looked up value (likely related to the execution order) basicOnTheDataNode -required_capability: join_lookup_v4 +required_capability: join_lookup_v5 FROM employees | EVAL language_code = languages @@ -22,7 +21,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; basicRow -required_capability: join_lookup_v4 +required_capability: join_lookup_v5 ROW language_code = 1 | LOOKUP JOIN languages_lookup ON language_code @@ -33,7 +32,7 @@ language_code:integer | language_name:keyword ; basicOnTheCoordinator
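Editor's note: languages_non_unique_key.csv above deliberately repeats language_code values (four rows for code 1, three for code 2), and the nonUniqueRightKey* tests that follow assert that joining on such a key yields multivalued columns, e.g. language_name becomes [German, German, German] for code 2. Below is a rough, pure-Java model of that expectation, under the assumptions the specs suggest: null fields are dropped and country is sorted, matching the tests' MV_SORT. Record and method names are hypothetical.

```java
// Editorial sketch, not part of the diff: what a non-unique right-hand join key
// is expected to produce per the csv-spec results (one output row whose joined
// fields are multivalued). Simplified to single-valued countries.
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;

final class NonUniqueKeyLookup {

    record Row(int languageCode, String languageName, String country) {}

    static Map<String, List<String>> lookup(List<Row> lookupIndex, int key) {
        List<Row> matches = lookupIndex.stream().filter(r -> r.languageCode() == key).toList();
        Map<String, List<String>> multivalued = new LinkedHashMap<>();
        multivalued.put("language_name", matches.stream().map(Row::languageName).filter(Objects::nonNull).collect(Collectors.toList()));
        multivalued.put("country", matches.stream().map(Row::country).filter(Objects::nonNull).sorted().collect(Collectors.toList()));
        return multivalued;
    }

    public static void main(String[] args) {
        List<Row> languagesNonUniqueKey = List.of(
            new Row(2, "German", "Germany"),
            new Row(2, "German", "Switzerland"),
            new Row(2, "German", null)
        );
        // Expected shape, as in the nonUniqueRightKeyFromRow test:
        // {language_name=[German, German, German], country=[Germany, Switzerland]}
        System.out.println(lookup(languagesNonUniqueKey, 2));
    }
}
```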
-required_capability: join_lookup_v4 +required_capability: join_lookup_v5 FROM employees | SORT emp_no @@ -50,7 +49,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; subsequentEvalOnTheDataNode -required_capability: join_lookup_v4 +required_capability: join_lookup_v5 FROM employees | EVAL language_code = languages @@ -68,7 +67,7 @@ emp_no:integer | language_code:integer | language_name:keyword | language_code_x ; subsequentEvalOnTheCoordinator -required_capability: join_lookup_v4 +required_capability: join_lookup_v5 FROM employees | SORT emp_no @@ -85,8 +84,102 @@ emp_no:integer | language_code:integer | language_name:keyword | language_code_x 10003 | 4 | german | 8 ; +sortEvalBeforeLookup +required_capability: join_lookup_v5 + +FROM employees +| SORT emp_no +| EVAL language_code = (emp_no % 10) + 1 +| LOOKUP JOIN languages_lookup ON language_code +| KEEP emp_no, language_code, language_name +| LIMIT 3 +; + +emp_no:integer | language_code:integer | language_name:keyword +10001 | 2 | French +10002 | 3 | Spanish +10003 | 4 | German +; + +nonUniqueLeftKeyOnTheDataNode +required_capability: join_lookup_v5 + +FROM employees +| WHERE emp_no <= 10030 +| EVAL language_code = emp_no % 10 +| WHERE language_code < 3 +| LOOKUP JOIN languages_lookup ON language_code +| SORT emp_no +| KEEP emp_no, language_code, language_name +; + +emp_no:integer | language_code:integer | language_name:keyword +10001 |1 | English +10002 |2 | French +10010 |0 | null +10011 |1 | English +10012 |2 | French +10020 |0 | null +10021 |1 | English +10022 |2 | French +10030 |0 | null +; + +nonUniqueRightKeyOnTheDataNode +required_capability: join_lookup_v5 + +FROM employees +| EVAL language_code = emp_no % 10 +| LOOKUP JOIN languages_lookup_non_unique_key ON language_code +| WHERE emp_no > 10090 AND emp_no < 10096 +| SORT emp_no +| EVAL country = MV_SORT(country) +| KEEP emp_no, language_code, language_name, country +; + +emp_no:integer | language_code:integer | language_name:keyword | country:keyword +10091 | 1 | [English, English, English] | [Canada, United Kingdom, United States of America] +10092 | 2 | [German, German, German] | [Austria, Germany, Switzerland] +10093 | 3 | null | null +10094 | 4 | Quenya | null +10095 | 5 | null | Atlantis +; + +nonUniqueRightKeyOnTheCoordinator +required_capability: join_lookup_v5 + +FROM employees +| SORT emp_no +| LIMIT 5 +| EVAL language_code = emp_no % 10 +| LOOKUP JOIN languages_lookup_non_unique_key ON language_code +| EVAL country = MV_SORT(country) +| KEEP emp_no, language_code, language_name, country +; + +emp_no:integer | language_code:integer | language_name:keyword | country:keyword +10001 | 1 | [English, English, English] | [Canada, United Kingdom, United States of America] +10002 | 2 | [German, German, German] | [Austria, Germany, Switzerland] +10003 | 3 | null | null +10004 | 4 | Quenya | null +10005 | 5 | null | Atlantis +; + +nonUniqueRightKeyFromRow +required_capability: join_lookup_v5 + +ROW language_code = 2 +| LOOKUP JOIN languages_lookup_non_unique_key ON language_code +| DROP country.keyword +| EVAL country = MV_SORT(country) +; + +language_code:integer | language_name:keyword | country:keyword +2 | [German, German, German] | [Austria, Germany, Switzerland] +; + lookupIPFromRow -required_capability: join_lookup_v4 +required_capability: join_lookup_v5 ROW left = "left", client_ip = "172.21.0.5", right = "right" | LOOKUP JOIN clientips_lookup ON client_ip @@ -97,7 +190,7 @@ left | 172.21.0.5 | right | Development ; lookupIPFromRowWithShadowing 
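Editor's note: the lookupIPFromRowWithShadowing* tests that follow pin down the shadowing rule: when the lookup index supplies a field name the incoming row already has (env in these tests), the looked-up value replaces the original. A hedged sketch of that merge order, using plain maps as a stand-in for ES|QL rows:

```java
// Editorial sketch, not part of the diff: lookup columns shadow existing
// same-named columns, as the csv-spec expects (env -> "Development").
import java.util.LinkedHashMap;
import java.util.Map;

final class LookupShadowing {

    static Map<String, Object> join(Map<String, Object> incoming, Map<String, Object> lookedUp) {
        Map<String, Object> out = new LinkedHashMap<>(incoming);
        out.putAll(lookedUp); // the looked-up value wins over the incoming one
        return out;
    }

    public static void main(String[] args) {
        Map<String, Object> row = new LinkedHashMap<>(Map.of("client_ip", "172.21.0.5", "env", "env"));
        Map<String, Object> lookup = Map.of("env", "Development");
        System.out.println(join(row, lookup)); // env is replaced by "Development"
    }
}
```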
-required_capability: join_lookup_v4 +required_capability: join_lookup_v5 ROW left = "left", client_ip = "172.21.0.5", env = "env", right = "right" | LOOKUP JOIN clientips_lookup ON client_ip @@ -108,7 +201,7 @@ left | 172.21.0.5 | right | Development ; lookupIPFromRowWithShadowingKeep -required_capability: join_lookup_v4 +required_capability: join_lookup_v5 ROW left = "left", client_ip = "172.21.0.5", env = "env", right = "right" | EVAL client_ip = client_ip::keyword @@ -121,7 +214,7 @@ left | 172.21.0.5 | right | Development ; lookupIPFromRowWithShadowingKeepReordered -required_capability: join_lookup_v4 +required_capability: join_lookup_v5 ROW left = "left", client_ip = "172.21.0.5", env = "env", right = "right" | EVAL client_ip = client_ip::keyword @@ -134,7 +227,7 @@ right | Development | 172.21.0.5 ; lookupIPFromIndex -required_capability: join_lookup_v4 +required_capability: join_lookup_v5 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -153,7 +246,7 @@ ignoreOrder:true ; lookupIPFromIndexKeep -required_capability: join_lookup_v4 +required_capability: join_lookup_v5 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -173,7 +266,7 @@ ignoreOrder:true ; lookupIPFromIndexStats -required_capability: join_lookup_v4 +required_capability: join_lookup_v5 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -189,7 +282,7 @@ count:long | env:keyword ; lookupIPFromIndexStatsKeep -required_capability: join_lookup_v4 +required_capability: join_lookup_v5 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -206,7 +299,7 @@ count:long | env:keyword ; lookupMessageFromRow -required_capability: join_lookup_v4 +required_capability: join_lookup_v5 ROW left = "left", message = "Connected to 10.1.0.1", right = "right" | LOOKUP JOIN message_types_lookup ON message @@ -217,7 +310,7 @@ left | Connected to 10.1.0.1 | right | Success ; lookupMessageFromRowWithShadowing -required_capability: join_lookup_v4 +required_capability: join_lookup_v5 ROW left = "left", message = "Connected to 10.1.0.1", type = "unknown", right = "right" | LOOKUP JOIN message_types_lookup ON message @@ -228,7 +321,7 @@ left | Connected to 10.1.0.1 | right | Success ; lookupMessageFromRowWithShadowingKeep -required_capability: join_lookup_v4 +required_capability: join_lookup_v5 ROW left = "left", message = "Connected to 10.1.0.1", type = "unknown", right = "right" | LOOKUP JOIN message_types_lookup ON message @@ -240,7 +333,7 @@ left | Connected to 10.1.0.1 | right | Success ; lookupMessageFromIndex -required_capability: join_lookup_v4 +required_capability: join_lookup_v5 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -258,7 +351,7 @@ ignoreOrder:true ; lookupMessageFromIndexKeep -required_capability: join_lookup_v4 +required_capability: join_lookup_v5 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -277,7 +370,7 @@ ignoreOrder:true ; lookupMessageFromIndexKeepReordered -required_capability: join_lookup_v4 +required_capability: join_lookup_v5 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -296,7 +389,7 @@ Success | 172.21.2.162 | 3450233 | Connected to 10.1.0.3 ; lookupMessageFromIndexStats -required_capability: join_lookup_v4 +required_capability: join_lookup_v5 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -311,7 +404,7 @@ count:long | type:keyword ; lookupMessageFromIndexStatsKeep -required_capability: join_lookup_v4 +required_capability: join_lookup_v5 FROM sample_data | LOOKUP JOIN message_types_lookup ON message diff --git 
a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-airports_no_doc_values.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-airports_no_doc_values.json index d7097f89a17df..782fd40712f43 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-airports_no_doc_values.json +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-airports_no_doc_values.json @@ -24,7 +24,9 @@ "type": "keyword" }, "city_location": { - "type": "geo_point" + "type": "geo_point", + "index": true, + "doc_values": false } } } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec index ac9948c90f5e9..8694c973448e9 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec @@ -519,6 +519,63 @@ centroid:geo_point | count:long POINT (42.97109629958868 14.7552534006536) | 1 ; +############################################### +# Tests for ST_EXTENT_AGG on GEO_POINT type + +stExtentSingleGeoPoint +required_capability: st_extent_agg +ROW point = TO_GEOPOINT("POINT(42.97109629958868 14.7552534006536)") +| STATS extent = ST_EXTENT_AGG(point) +; + +extent:geo_shape +BBOX(42.97109629958868, 42.97109629958868, 14.7552534006536, 14.7552534006536) +; + +stExtentMultipleGeoPoints +required_capability: st_extent_agg +// tag::st_extent_agg-airports[] +FROM airports +| WHERE country == "India" +| STATS extent = ST_EXTENT_AGG(location) +// end::st_extent_agg-airports[] +; + +// tag::st_extent_agg-airports-result[] +extent:geo_shape +BBOX (70.77995480038226, 91.5882289968431, 33.9830909203738, 8.47650992218405) +// end::st_extent_agg-airports-result[] +; + +stExtentMultipleGeoPointsNoDocValues +required_capability: st_extent_agg +FROM airports_no_doc_values | WHERE country == "India" | STATS extent = ST_EXTENT_AGG(location) +; + +extent:geo_shape +BBOX (70.77995480038226, 91.5882289968431, 33.9830909203738, 8.47650992218405) +; + +stExtentMultipleGeoPointGrouping +required_capability: st_extent_agg +FROM airports | STATS extent = ST_EXTENT_AGG(location) BY country | SORT country | LIMIT 3 +; + +extent:geo_shape | country:keyword +BBOX (69.2100736219436, 69.2100736219436, 34.56339786294848, 34.56339786294848) | Afghanistan +BBOX (19.715032372623682, 19.715032372623682, 41.4208514476195, 41.4208514476195) | Albania +BBOX (-0.6067969836294651, 6.621946580708027, 36.69972063973546, 35.62027471605688) | Algeria +; + +stExtentGeoShapes +required_capability: st_extent_agg +FROM airport_city_boundaries | WHERE region == "City of New York" | STATS extent = ST_EXTENT_AGG(city_boundary) +; + +extent:geo_shape +BBOX (-74.25880000926554, -73.70020005851984, 40.91759996954352, 40.47659996431321) +; + ############################################### # Tests for ST_INTERSECTS on GEO_POINT type @@ -1698,6 +1755,48 @@ centroid:cartesian_point | count:long POINT (726480.0130685265 3359566.331716279) | 849 ; +############################################### +# Tests for ST_EXTENT_AGG on CARTESIAN_POINT type + +stExtentSingleCartesianPoint +required_capability: st_extent_agg +ROW point = TO_CARTESIANPOINT("POINT(429.7109629958868 147.552534006536)") +| STATS extent = ST_EXTENT_AGG(point) +; + +extent:cartesian_shape +BBOX (429.7109680175781, 429.7109680175781, 147.5525360107422, 147.5525360107422) +; + +stExtentMultipleCartesianPoints +required_capability: st_extent_agg +FROM airports_web | WHERE 
scalerank == 9 | STATS extent = ST_EXTENT_AGG(location) +; + +extent:cartesian_shape +BBOX (4783520.5, 1.6168486E7, 8704352.0, -584415.9375) +; + +stExtentMultipleCartesianPointGrouping +required_capability: st_extent_agg +FROM airports_web | STATS extent = ST_EXTENT_AGG(location) BY scalerank | SORT scalerank DESC | LIMIT 3 +; + +extent:cartesian_shape | scalerank:integer +BBOX (4783520.5, 1.6168486E7, 8704352.0, -584415.9375) | 9 +BBOX (-1.936604E7, 1.8695374E7, 1.4502138E7, -3943067.25) | 8 +BBOX (-1.891609E7, 1.9947946E7, 8455470.0, -7128878.5) | 7 +; + +stExtentCartesianShapes +required_capability: st_extent_agg +FROM cartesian_multipolygons | STATS extent = ST_EXTENT_AGG(shape) +; + +extent:cartesian_shape +BBOX (0.0, 3.0, 3.0, 0.0) +; + ############################################### # Tests for ST_INTERSECTS on CARTESIAN_POINT type diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index d76f4c05d955f..100c0d716d65c 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -564,7 +564,8 @@ c:long | gender:keyword | trunk_worked_seconds:long 0 | null | 200000000 ; -byStringAndLongWithAlias +// the query is incorrectly physically planned (fails verification) in pre-8.13.0 versions +byStringAndLongWithAlias#[skip:-8.12.99] FROM employees | EVAL trunk_worked_seconds = avg_worked_seconds / 100000000 * 100000000 | RENAME gender as g, trunk_worked_seconds as tws diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEnrichBasedCrossClusterTestCase.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEnrichBasedCrossClusterTestCase.java new file mode 100644 index 0000000000000..66ac32b33cd4d --- /dev/null +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEnrichBasedCrossClusterTestCase.java @@ -0,0 +1,290 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +package org.elasticsearch.xpack.esql.action; + +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.TransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.core.Tuple; +import org.elasticsearch.ingest.common.IngestCommonPlugin; +import org.elasticsearch.injection.guice.Inject; +import org.elasticsearch.license.LicenseService; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.protocol.xpack.XPackInfoRequest; +import org.elasticsearch.protocol.xpack.XPackInfoResponse; +import org.elasticsearch.reindex.ReindexPlugin; +import org.elasticsearch.test.AbstractMultiClustersTestCase; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; +import org.elasticsearch.xpack.core.XPackSettings; +import org.elasticsearch.xpack.core.action.TransportXPackInfoAction; +import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction; +import org.elasticsearch.xpack.core.action.XPackInfoFeatureResponse; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; +import org.elasticsearch.xpack.core.enrich.action.DeleteEnrichPolicyAction; +import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction; +import org.elasticsearch.xpack.core.enrich.action.PutEnrichPolicyAction; +import org.elasticsearch.xpack.enrich.EnrichPlugin; +import org.elasticsearch.xpack.esql.EsqlTestUtils; +import org.elasticsearch.xpack.esql.plan.logical.Enrich; +import org.junit.After; +import org.junit.Before; + +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.containsString; + +public abstract class AbstractEnrichBasedCrossClusterTestCase extends AbstractMultiClustersTestCase { + + public static String REMOTE_CLUSTER_1 = "c1"; + public static String REMOTE_CLUSTER_2 = "c2"; + + /** + * subclasses should override (returning true) if errors are expected and tolerable when enrich policies are wiped after each test method run + */ + protected boolean tolerateErrorsWhenWipingEnrichPolicies() { + return false; + } + + @Override + protected List<String> remoteClusterAlias() { + return List.of(REMOTE_CLUSTER_1, REMOTE_CLUSTER_2); + } + + protected Collection<String> allClusters() { + return CollectionUtils.appendToCopy(remoteClusterAlias(), LOCAL_CLUSTER); + } + + @Override + protected Collection<Class<? extends Plugin>> nodePlugins(String clusterAlias) { + List<Class<? extends Plugin>> plugins = new ArrayList<>(super.nodePlugins(clusterAlias)); + plugins.add(CrossClustersEnrichIT.LocalStateEnrich.class); + plugins.add(IngestCommonPlugin.class); + plugins.add(ReindexPlugin.class); + return plugins; + } + + @Override + protected Settings nodeSettings() { + return Settings.builder().put(super.nodeSettings()).put(XPackSettings.SECURITY_ENABLED.getKey(), false).build(); + } + + static final EnrichPolicy hostPolicy = new EnrichPolicy("match", null, List.of("hosts"), "ip", List.of("ip", "os")); + static final EnrichPolicy vendorPolicy = new EnrichPolicy("match", null, List.of("vendors"), "os", List.of("os", "vendor")); + + @Before + public void setupHostsEnrich() { + // the hosts policy is identical on every node + Map<String, String> allHosts = Map.of( + "192.168.1.2", + "Windows", + "192.168.1.3", + "MacOS", + "192.168.1.4", + "Linux", + "192.168.1.5", + "Android", + "192.168.1.6", + "iOS", + "192.168.1.7", + "Windows", + "192.168.1.8", + "MacOS", + "192.168.1.9", + "Linux", + "192.168.1.10", + "Linux", + "192.168.1.11", + "Windows" + ); + for (String cluster : allClusters()) { + Client client = client(cluster); + client.admin().indices().prepareCreate("hosts").setMapping("ip", "type=ip", "os", "type=keyword").get(); + for (Map.Entry<String, String> h : allHosts.entrySet()) { + client.prepareIndex("hosts").setSource("ip", h.getKey(), "os", h.getValue()).get(); + } + client.admin().indices().prepareRefresh("hosts").get(); + client.execute(PutEnrichPolicyAction.INSTANCE, new PutEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, "hosts", hostPolicy)) + .actionGet(); + client.execute(ExecuteEnrichPolicyAction.INSTANCE, new ExecuteEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, "hosts")) + .actionGet(); + assertAcked(client.admin().indices().prepareDelete("hosts")); + } + } + + @Before + public void setupVendorPolicy() { + var localVendors = Map.of("Windows", "Microsoft", "MacOS", "Apple", "iOS", "Apple", "Android", "Samsung", "Linux", "Redhat"); + var c1Vendors = Map.of("Windows", "Microsoft", "MacOS", "Apple", "iOS", "Apple", "Android", "Google", "Linux", "Suse"); + var c2Vendors = Map.of("Windows", "Microsoft", "MacOS", "Apple", "iOS", "Apple", "Android", "Sony", "Linux", "Ubuntu"); + var vendors = Map.of(LOCAL_CLUSTER, localVendors, REMOTE_CLUSTER_1, c1Vendors, REMOTE_CLUSTER_2, c2Vendors); + for (Map.Entry<String, Map<String, String>> e : vendors.entrySet()) { + Client client = client(e.getKey()); + client.admin().indices().prepareCreate("vendors").setMapping("os", "type=keyword", "vendor", "type=keyword").get(); + for (Map.Entry<String, String> v : e.getValue().entrySet()) { + client.prepareIndex("vendors").setSource("os", v.getKey(), "vendor", v.getValue()).get(); + } + client.admin().indices().prepareRefresh("vendors").get(); + client.execute(PutEnrichPolicyAction.INSTANCE, new PutEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, "vendors", vendorPolicy)) + .actionGet(); + client.execute(ExecuteEnrichPolicyAction.INSTANCE, new ExecuteEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, "vendors")) + .actionGet(); + assertAcked(client.admin().indices().prepareDelete("vendors")); + } + } + + @Before + public void setupEventsIndices() { + record Event(long timestamp, String user, String host) { + + } + List<Event> e0 = List.of( + new Event(1, "matthew", "192.168.1.3"), + new Event(2, "simon", "192.168.1.5"), + new Event(3, "park", "192.168.1.2"), + new Event(4, "andrew", "192.168.1.7"), + new Event(5, "simon", "192.168.1.20"), + new Event(6, "kevin", "192.168.1.2"), + new Event(7, "akio", "192.168.1.5"), + new Event(8, "luke", "192.168.1.2"), + new Event(9, "jack", "192.168.1.4") + ); + List<Event> e1 = List.of( + new Event(1, "andres", "192.168.1.2"), + new Event(2, "sergio", "192.168.1.6"), + new Event(3, "kylian", "192.168.1.8"), + new Event(4, "andrew", "192.168.1.9"), + new Event(5, "jack", "192.168.1.3"), + new Event(6, "kevin", "192.168.1.4"), + new Event(7, "akio", "192.168.1.7"), + new Event(8, "kevin", "192.168.1.21"), + new Event(9, "andres", "192.168.1.8") + ); + List<Event> e2 = List.of( + new Event(1, "park", "192.168.1.25"), + new Event(2, "akio", "192.168.1.5"), + new Event(3, "park", "192.168.1.2"), + new Event(4, "kevin", "192.168.1.3") + ); + for (var c :
Map.of(LOCAL_CLUSTER, e0, REMOTE_CLUSTER_1, e1, REMOTE_CLUSTER_2, e2).entrySet()) { + Client client = client(c.getKey()); + client.admin() + .indices() + .prepareCreate("events") + .setMapping("timestamp", "type=long", "user", "type=keyword", "host", "type=ip") + .get(); + for (var e : c.getValue()) { + client.prepareIndex("events").setSource("timestamp", e.timestamp, "user", e.user, "host", e.host).get(); + } + client.admin().indices().prepareRefresh("events").get(); + } + } + + @After + public void wipeEnrichPolicies() { + for (String cluster : allClusters()) { + cluster(cluster).wipe(Set.of()); + for (String policy : List.of("hosts", "vendors")) { + if (tolerateErrorsWhenWipingEnrichPolicies()) { + try { + client(cluster).execute( + DeleteEnrichPolicyAction.INSTANCE, + new DeleteEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, policy) + ); + } catch (Exception e) { + assertThat(e.getMessage(), containsString("Cluster is already closed")); + } + + } else { + client(cluster).execute( + DeleteEnrichPolicyAction.INSTANCE, + new DeleteEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, policy) + ); + } + } + } + } + + static String enrichHosts(Enrich.Mode mode) { + return EsqlTestUtils.randomEnrichCommand("hosts", mode, hostPolicy.getMatchField(), hostPolicy.getEnrichFields()); + } + + static String enrichVendors(Enrich.Mode mode) { + return EsqlTestUtils.randomEnrichCommand("vendors", mode, vendorPolicy.getMatchField(), vendorPolicy.getEnrichFields()); + } + + protected EsqlQueryResponse runQuery(String query, Boolean ccsMetadataInResponse) { + EsqlQueryRequest request = EsqlQueryRequest.syncEsqlQueryRequest(); + request.query(query); + request.pragmas(AbstractEsqlIntegTestCase.randomPragmas()); + if (randomBoolean()) { + request.profile(true); + } + if (ccsMetadataInResponse != null) { + request.includeCCSMetadata(ccsMetadataInResponse); + } + return client(LOCAL_CLUSTER).execute(EsqlQueryAction.INSTANCE, request).actionGet(30, TimeUnit.SECONDS); + } + + public static Tuple<Boolean, Boolean> randomIncludeCCSMetadata() { + return switch (randomIntBetween(1, 3)) { + case 1 -> new Tuple<>(Boolean.TRUE, Boolean.TRUE); + case 2 -> new Tuple<>(Boolean.FALSE, Boolean.FALSE); + case 3 -> new Tuple<>(null, Boolean.FALSE); + default -> throw new AssertionError("should not get here"); + }; + } + + public static class LocalStateEnrich extends LocalStateCompositeXPackPlugin { + public LocalStateEnrich(final Settings settings, final Path configPath) throws Exception { + super(settings, configPath); + + plugins.add(new EnrichPlugin(settings) { + @Override + protected XPackLicenseState getLicenseState() { + return LocalStateEnrich.this.getLicenseState(); + } + }); + } + + public static class EnrichTransportXPackInfoAction extends TransportXPackInfoAction { + @Inject + public EnrichTransportXPackInfoAction( + TransportService transportService, + ActionFilters actionFilters, + LicenseService licenseService, + NodeClient client + ) { + super(transportService, actionFilters, licenseService, client); + } + + @Override + protected List<ActionType<XPackInfoFeatureResponse>> infoActions() { + return Collections.singletonList(XPackInfoFeatureAction.ENRICH); + } + } + + @Override + protected Class<? extends ActionType<XPackInfoResponse>> getInfoAction() { + return CrossClustersQueriesWithInvalidLicenseIT.LocalStateEnrich.EnrichTransportXPackInfoAction.class; + } + } +} diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryIT.java index
c8206621de419..a2bba19db50fc 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryIT.java @@ -35,7 +35,6 @@ import org.elasticsearch.xpack.core.async.DeleteAsyncResultRequest; import org.elasticsearch.xpack.core.async.GetAsyncResultRequest; import org.elasticsearch.xpack.core.async.TransportDeleteAsyncResultAction; -import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; import org.junit.Before; import java.io.IOException; @@ -78,7 +77,7 @@ protected Map skipUnavailableForRemoteClusters() { @Override protected Collection> nodePlugins(String clusterAlias) { List> plugins = new ArrayList<>(super.nodePlugins(clusterAlias)); - plugins.add(EsqlPlugin.class); + plugins.add(EsqlPluginWithEnterpriseOrTrialLicense.class); plugins.add(EsqlAsyncActionIT.LocalStateEsqlAsync.class); // allows the async_search DELETE action plugins.add(InternalExchangePlugin.class); plugins.add(PauseFieldPlugin.class); diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterEnrichUnavailableClustersIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterEnrichUnavailableClustersIT.java index 5c3e1974e924f..09ad97b08f357 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterEnrichUnavailableClustersIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterEnrichUnavailableClustersIT.java @@ -8,36 +8,21 @@ package org.elasticsearch.xpack.esql.action; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.core.Tuple; -import org.elasticsearch.ingest.common.IngestCommonPlugin; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.reindex.ReindexPlugin; -import org.elasticsearch.test.AbstractMultiClustersTestCase; import org.elasticsearch.transport.RemoteClusterAware; -import org.elasticsearch.xpack.core.XPackSettings; -import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction; -import org.elasticsearch.xpack.core.enrich.action.PutEnrichPolicyAction; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.plan.logical.Enrich; -import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; -import org.junit.Before; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Locale; -import java.util.Map; import java.util.Set; -import java.util.concurrent.TimeUnit; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.xpack.esql.EsqlTestUtils.getValuesList; -import static org.elasticsearch.xpack.esql.action.CrossClustersEnrichIT.enrichHosts; -import static org.elasticsearch.xpack.esql.action.CrossClustersEnrichIT.enrichVendors; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -47,151 +32,26 @@ * This IT test is the dual of CrossClustersEnrichIT, which tests "happy path" * and this one tests unavailable cluster scenarios using (most of) the same tests. 
*/ -public class CrossClusterEnrichUnavailableClustersIT extends AbstractMultiClustersTestCase { - - public static String REMOTE_CLUSTER_1 = "c1"; - public static String REMOTE_CLUSTER_2 = "c2"; - - @Override - protected List remoteClusterAlias() { - return List.of(REMOTE_CLUSTER_1, REMOTE_CLUSTER_2); - } +public class CrossClusterEnrichUnavailableClustersIT extends AbstractEnrichBasedCrossClusterTestCase { @Override protected boolean reuseClusters() { return false; } - private Collection allClusters() { - return CollectionUtils.appendToCopy(remoteClusterAlias(), LOCAL_CLUSTER); + @Override + protected boolean tolerateErrorsWhenWipingEnrichPolicies() { + // attempt to wipe will fail since some clusters are already closed + return true; } @Override protected Collection> nodePlugins(String clusterAlias) { List> plugins = new ArrayList<>(super.nodePlugins(clusterAlias)); - plugins.add(EsqlPlugin.class); - plugins.add(CrossClustersEnrichIT.LocalStateEnrich.class); - plugins.add(IngestCommonPlugin.class); - plugins.add(ReindexPlugin.class); + plugins.add(EsqlPluginWithEnterpriseOrTrialLicense.class); return plugins; } - @Override - protected Settings nodeSettings() { - return Settings.builder().put(super.nodeSettings()).put(XPackSettings.SECURITY_ENABLED.getKey(), false).build(); - } - - @Before - public void setupHostsEnrich() { - // the hosts policy are identical on every node - Map allHosts = Map.of( - "192.168.1.2", - "Windows", - "192.168.1.3", - "MacOS", - "192.168.1.4", - "Linux", - "192.168.1.5", - "Android", - "192.168.1.6", - "iOS", - "192.168.1.7", - "Windows", - "192.168.1.8", - "MacOS", - "192.168.1.9", - "Linux", - "192.168.1.10", - "Linux", - "192.168.1.11", - "Windows" - ); - for (String cluster : allClusters()) { - Client client = client(cluster); - client.admin().indices().prepareCreate("hosts").setMapping("ip", "type=ip", "os", "type=keyword").get(); - for (Map.Entry h : allHosts.entrySet()) { - client.prepareIndex("hosts").setSource("ip", h.getKey(), "os", h.getValue()).get(); - } - client.admin().indices().prepareRefresh("hosts").get(); - client.execute( - PutEnrichPolicyAction.INSTANCE, - new PutEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, "hosts", CrossClustersEnrichIT.hostPolicy) - ).actionGet(); - client.execute(ExecuteEnrichPolicyAction.INSTANCE, new ExecuteEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, "hosts")) - .actionGet(); - assertAcked(client.admin().indices().prepareDelete("hosts")); - } - } - - @Before - public void setupVendorPolicy() { - var localVendors = Map.of("Windows", "Microsoft", "MacOS", "Apple", "iOS", "Apple", "Android", "Samsung", "Linux", "Redhat"); - var c1Vendors = Map.of("Windows", "Microsoft", "MacOS", "Apple", "iOS", "Apple", "Android", "Google", "Linux", "Suse"); - var c2Vendors = Map.of("Windows", "Microsoft", "MacOS", "Apple", "iOS", "Apple", "Android", "Sony", "Linux", "Ubuntu"); - var vendors = Map.of(LOCAL_CLUSTER, localVendors, "c1", c1Vendors, "c2", c2Vendors); - for (Map.Entry> e : vendors.entrySet()) { - Client client = client(e.getKey()); - client.admin().indices().prepareCreate("vendors").setMapping("os", "type=keyword", "vendor", "type=keyword").get(); - for (Map.Entry v : e.getValue().entrySet()) { - client.prepareIndex("vendors").setSource("os", v.getKey(), "vendor", v.getValue()).get(); - } - client.admin().indices().prepareRefresh("vendors").get(); - client.execute( - PutEnrichPolicyAction.INSTANCE, - new PutEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, "vendors", CrossClustersEnrichIT.vendorPolicy) - ).actionGet(); 
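Editor's note: the override above shows why the base class grew the tolerateErrorsWhenWipingEnrichPolicies() hook: when remote clusters are deliberately shut down mid-test, the @After wipe still runs but may only fail with "Cluster is already closed". A compact sketch of that cleanup pattern follows; DeletePolicyCall is a hypothetical stand-in for the DeleteEnrichPolicyAction client call.

```java
// Editorial sketch, not part of the diff: wipe with optional error tolerance,
// mirroring the conditional try/catch in wipeEnrichPolicies().
import java.util.List;

final class TolerantWipe {

    interface DeletePolicyCall {
        void delete(String policy) throws Exception;
    }

    static void wipe(List<String> policies, DeletePolicyCall call, boolean tolerateErrors) {
        for (String policy : policies) {
            try {
                call.delete(policy);
            } catch (Exception e) {
                if (tolerateErrors == false) {
                    throw new AssertionError("unexpected failure wiping " + policy, e);
                }
                // mirrors the assertion in the diff: only "Cluster is already closed" is acceptable
                assert e.getMessage().contains("Cluster is already closed") : e.getMessage();
            }
        }
    }

    public static void main(String[] args) {
        wipe(List.of("hosts", "vendors"), p -> { throw new IllegalStateException("Cluster is already closed"); }, true);
        System.out.println("tolerated expected close errors");
    }
}
```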
- client.execute(ExecuteEnrichPolicyAction.INSTANCE, new ExecuteEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, "vendors")) - .actionGet(); - assertAcked(client.admin().indices().prepareDelete("vendors")); - } - } - - @Before - public void setupEventsIndices() { - record Event(long timestamp, String user, String host) {} - - List e0 = List.of( - new Event(1, "matthew", "192.168.1.3"), - new Event(2, "simon", "192.168.1.5"), - new Event(3, "park", "192.168.1.2"), - new Event(4, "andrew", "192.168.1.7"), - new Event(5, "simon", "192.168.1.20"), - new Event(6, "kevin", "192.168.1.2"), - new Event(7, "akio", "192.168.1.5"), - new Event(8, "luke", "192.168.1.2"), - new Event(9, "jack", "192.168.1.4") - ); - List e1 = List.of( - new Event(1, "andres", "192.168.1.2"), - new Event(2, "sergio", "192.168.1.6"), - new Event(3, "kylian", "192.168.1.8"), - new Event(4, "andrew", "192.168.1.9"), - new Event(5, "jack", "192.168.1.3"), - new Event(6, "kevin", "192.168.1.4"), - new Event(7, "akio", "192.168.1.7"), - new Event(8, "kevin", "192.168.1.21"), - new Event(9, "andres", "192.168.1.8") - ); - List e2 = List.of( - new Event(1, "park", "192.168.1.25"), - new Event(2, "akio", "192.168.1.5"), - new Event(3, "park", "192.168.1.2"), - new Event(4, "kevin", "192.168.1.3") - ); - for (var c : Map.of(LOCAL_CLUSTER, e0, "c1", e1, "c2", e2).entrySet()) { - Client client = client(c.getKey()); - client.admin() - .indices() - .prepareCreate("events") - .setMapping("timestamp", "type=long", "user", "type=keyword", "host", "type=ip") - .get(); - for (var e : c.getValue()) { - client.prepareIndex("events").setSource("timestamp", e.timestamp, "user", e.user, "host", e.host).get(); - } - client.admin().indices().prepareRefresh("events").get(); - } - } - public void testEnrichWithHostsPolicyAndDisconnectedRemotesWithSkipUnavailableTrue() throws IOException { setSkipUnavailable(REMOTE_CLUSTER_1, true); setSkipUnavailable(REMOTE_CLUSTER_2, true); @@ -645,19 +505,6 @@ public void testEnrichRemoteWithVendor() throws IOException { } } - protected EsqlQueryResponse runQuery(String query, Boolean ccsMetadataInResponse) { - EsqlQueryRequest request = EsqlQueryRequest.syncEsqlQueryRequest(); - request.query(query); - request.pragmas(AbstractEsqlIntegTestCase.randomPragmas()); - if (randomBoolean()) { - request.profile(true); - } - if (ccsMetadataInResponse != null) { - request.includeCCSMetadata(ccsMetadataInResponse); - } - return client(LOCAL_CLUSTER).execute(EsqlQueryAction.INSTANCE, request).actionGet(30, TimeUnit.SECONDS); - } - private static void assertCCSExecutionInfoDetails(EsqlExecutionInfo executionInfo) { assertThat(executionInfo.overallTook().millis(), greaterThanOrEqualTo(0L)); assertTrue(executionInfo.isCrossClusterSearch()); diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterQueryUnavailableRemotesIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterQueryUnavailableRemotesIT.java index d1c9b5cfb2ac7..f65764daafb8a 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterQueryUnavailableRemotesIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterQueryUnavailableRemotesIT.java @@ -18,7 +18,6 @@ import org.elasticsearch.test.AbstractMultiClustersTestCase; import org.elasticsearch.test.XContentTestUtils; import org.elasticsearch.xpack.esql.core.type.DataType; -import 
org.elasticsearch.xpack.esql.plugin.EsqlPlugin; import java.io.IOException; import java.util.ArrayList; @@ -54,8 +53,8 @@ protected boolean reuseClusters() { @Override protected Collection> nodePlugins(String clusterAlias) { List> plugins = new ArrayList<>(super.nodePlugins(clusterAlias)); - plugins.add(EsqlPlugin.class); - plugins.add(org.elasticsearch.xpack.esql.action.CrossClustersQueryIT.InternalExchangePlugin.class); + plugins.add(EsqlPluginWithEnterpriseOrTrialLicense.class); + plugins.add(CrossClustersQueryIT.InternalExchangePlugin.class); return plugins; } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java index 5291ad3b0d039..17f5f81486651 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java @@ -33,7 +33,6 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.esql.plugin.ComputeService; -import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; import org.junit.Before; import java.util.ArrayList; @@ -62,7 +61,7 @@ protected List remoteClusterAlias() { @Override protected Collection> nodePlugins(String clusterAlias) { List> plugins = new ArrayList<>(super.nodePlugins(clusterAlias)); - plugins.add(EsqlPlugin.class); + plugins.add(EsqlPluginWithEnterpriseOrTrialLicense.class); plugins.add(InternalExchangePlugin.class); plugins.add(PauseFieldPlugin.class); return plugins; diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersEnrichIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersEnrichIT.java index 57f85751999a5..4e6be6cc2bf74 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersEnrichIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersEnrichIT.java @@ -7,218 +7,34 @@ package org.elasticsearch.xpack.esql.action; -import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.TransportAction; -import org.elasticsearch.client.internal.Client; -import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.core.Tuple; -import org.elasticsearch.ingest.common.IngestCommonPlugin; -import org.elasticsearch.injection.guice.Inject; -import org.elasticsearch.license.LicenseService; -import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.protocol.xpack.XPackInfoRequest; -import org.elasticsearch.protocol.xpack.XPackInfoResponse; -import org.elasticsearch.reindex.ReindexPlugin; -import org.elasticsearch.test.AbstractMultiClustersTestCase; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; -import org.elasticsearch.xpack.core.XPackSettings; -import org.elasticsearch.xpack.core.action.TransportXPackInfoAction; -import 
org.elasticsearch.xpack.core.action.XPackInfoFeatureAction; -import org.elasticsearch.xpack.core.action.XPackInfoFeatureResponse; -import org.elasticsearch.xpack.core.enrich.EnrichPolicy; -import org.elasticsearch.xpack.core.enrich.action.DeleteEnrichPolicyAction; -import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction; -import org.elasticsearch.xpack.core.enrich.action.PutEnrichPolicyAction; -import org.elasticsearch.xpack.enrich.EnrichPlugin; -import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.plan.logical.Enrich; -import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; -import org.junit.After; -import org.junit.Before; -import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Locale; -import java.util.Map; import java.util.Set; -import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.xpack.esql.EsqlTestUtils.getValuesList; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; -public class CrossClustersEnrichIT extends AbstractMultiClustersTestCase { - - @Override - protected List remoteClusterAlias() { - return List.of("c1", "c2"); - } - - protected Collection allClusters() { - return CollectionUtils.appendToCopy(remoteClusterAlias(), LOCAL_CLUSTER); - } +public class CrossClustersEnrichIT extends AbstractEnrichBasedCrossClusterTestCase { @Override protected Collection> nodePlugins(String clusterAlias) { List> plugins = new ArrayList<>(super.nodePlugins(clusterAlias)); - plugins.add(EsqlPlugin.class); - plugins.add(LocalStateEnrich.class); - plugins.add(IngestCommonPlugin.class); - plugins.add(ReindexPlugin.class); + plugins.add(EsqlPluginWithEnterpriseOrTrialLicense.class); return plugins; } - @Override - protected Settings nodeSettings() { - return Settings.builder().put(super.nodeSettings()).put(XPackSettings.SECURITY_ENABLED.getKey(), false).build(); - } - - static final EnrichPolicy hostPolicy = new EnrichPolicy("match", null, List.of("hosts"), "ip", List.of("ip", "os")); - static final EnrichPolicy vendorPolicy = new EnrichPolicy("match", null, List.of("vendors"), "os", List.of("os", "vendor")); - - @Before - public void setupHostsEnrich() { - // the hosts policy are identical on every node - Map allHosts = Map.of( - "192.168.1.2", - "Windows", - "192.168.1.3", - "MacOS", - "192.168.1.4", - "Linux", - "192.168.1.5", - "Android", - "192.168.1.6", - "iOS", - "192.168.1.7", - "Windows", - "192.168.1.8", - "MacOS", - "192.168.1.9", - "Linux", - "192.168.1.10", - "Linux", - "192.168.1.11", - "Windows" - ); - for (String cluster : allClusters()) { - Client client = client(cluster); - client.admin().indices().prepareCreate("hosts").setMapping("ip", "type=ip", "os", "type=keyword").get(); - for (Map.Entry h : allHosts.entrySet()) { - client.prepareIndex("hosts").setSource("ip", h.getKey(), "os", h.getValue()).get(); - } - client.admin().indices().prepareRefresh("hosts").get(); - client.execute(PutEnrichPolicyAction.INSTANCE, new PutEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, "hosts", hostPolicy)) - .actionGet(); - client.execute(ExecuteEnrichPolicyAction.INSTANCE, new 
ExecuteEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, "hosts")) - .actionGet(); - assertAcked(client.admin().indices().prepareDelete("hosts")); - } - } - - @Before - public void setupVendorPolicy() { - var localVendors = Map.of("Windows", "Microsoft", "MacOS", "Apple", "iOS", "Apple", "Android", "Samsung", "Linux", "Redhat"); - var c1Vendors = Map.of("Windows", "Microsoft", "MacOS", "Apple", "iOS", "Apple", "Android", "Google", "Linux", "Suse"); - var c2Vendors = Map.of("Windows", "Microsoft", "MacOS", "Apple", "iOS", "Apple", "Android", "Sony", "Linux", "Ubuntu"); - var vendors = Map.of(LOCAL_CLUSTER, localVendors, "c1", c1Vendors, "c2", c2Vendors); - for (Map.Entry> e : vendors.entrySet()) { - Client client = client(e.getKey()); - client.admin().indices().prepareCreate("vendors").setMapping("os", "type=keyword", "vendor", "type=keyword").get(); - for (Map.Entry v : e.getValue().entrySet()) { - client.prepareIndex("vendors").setSource("os", v.getKey(), "vendor", v.getValue()).get(); - } - client.admin().indices().prepareRefresh("vendors").get(); - client.execute(PutEnrichPolicyAction.INSTANCE, new PutEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, "vendors", vendorPolicy)) - .actionGet(); - client.execute(ExecuteEnrichPolicyAction.INSTANCE, new ExecuteEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, "vendors")) - .actionGet(); - assertAcked(client.admin().indices().prepareDelete("vendors")); - } - } - - @Before - public void setupEventsIndices() { - record Event(long timestamp, String user, String host) { - - } - List e0 = List.of( - new Event(1, "matthew", "192.168.1.3"), - new Event(2, "simon", "192.168.1.5"), - new Event(3, "park", "192.168.1.2"), - new Event(4, "andrew", "192.168.1.7"), - new Event(5, "simon", "192.168.1.20"), - new Event(6, "kevin", "192.168.1.2"), - new Event(7, "akio", "192.168.1.5"), - new Event(8, "luke", "192.168.1.2"), - new Event(9, "jack", "192.168.1.4") - ); - List e1 = List.of( - new Event(1, "andres", "192.168.1.2"), - new Event(2, "sergio", "192.168.1.6"), - new Event(3, "kylian", "192.168.1.8"), - new Event(4, "andrew", "192.168.1.9"), - new Event(5, "jack", "192.168.1.3"), - new Event(6, "kevin", "192.168.1.4"), - new Event(7, "akio", "192.168.1.7"), - new Event(8, "kevin", "192.168.1.21"), - new Event(9, "andres", "192.168.1.8") - ); - List e2 = List.of( - new Event(1, "park", "192.168.1.25"), - new Event(2, "akio", "192.168.1.5"), - new Event(3, "park", "192.168.1.2"), - new Event(4, "kevin", "192.168.1.3") - ); - for (var c : Map.of(LOCAL_CLUSTER, e0, "c1", e1, "c2", e2).entrySet()) { - Client client = client(c.getKey()); - client.admin() - .indices() - .prepareCreate("events") - .setMapping("timestamp", "type=long", "user", "type=keyword", "host", "type=ip") - .get(); - for (var e : c.getValue()) { - client.prepareIndex("events").setSource("timestamp", e.timestamp, "user", e.user, "host", e.host).get(); - } - client.admin().indices().prepareRefresh("events").get(); - } - } - - @After - public void wipeEnrichPolicies() { - for (String cluster : allClusters()) { - cluster(cluster).wipe(Set.of()); - for (String policy : List.of("hosts", "vendors")) { - client(cluster).execute( - DeleteEnrichPolicyAction.INSTANCE, - new DeleteEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, policy) - ); - } - } - } - - static String enrichHosts(Enrich.Mode mode) { - return EsqlTestUtils.randomEnrichCommand("hosts", mode, hostPolicy.getMatchField(), hostPolicy.getEnrichFields()); - } - - static String enrichVendors(Enrich.Mode mode) { - return 
EsqlTestUtils.randomEnrichCommand("vendors", mode, vendorPolicy.getMatchField(), vendorPolicy.getEnrichFields()); - } - public void testWithHostsPolicy() { for (var mode : Enrich.Mode.values()) { String query = "FROM events | eval ip= TO_STR(host) | " + enrichHosts(mode) + " | stats c = COUNT(*) by os | SORT os"; @@ -606,19 +422,6 @@ public void testEnrichCoordinatorThenEnrichRemote() { ); } - protected EsqlQueryResponse runQuery(String query, Boolean ccsMetadataInResponse) { - EsqlQueryRequest request = EsqlQueryRequest.syncEsqlQueryRequest(); - request.query(query); - request.pragmas(AbstractEsqlIntegTestCase.randomPragmas()); - if (randomBoolean()) { - request.profile(true); - } - if (ccsMetadataInResponse != null) { - request.includeCCSMetadata(ccsMetadataInResponse); - } - return client(LOCAL_CLUSTER).execute(EsqlQueryAction.INSTANCE, request).actionGet(30, TimeUnit.SECONDS); - } - private static void assertCCSExecutionInfoDetails(EsqlExecutionInfo executionInfo) { assertThat(executionInfo.overallTook().millis(), greaterThanOrEqualTo(0L)); assertTrue(executionInfo.isCrossClusterSearch()); @@ -637,49 +440,4 @@ private static void assertCCSExecutionInfoDetails(EsqlExecutionInfo executionInf assertThat(cluster.getFailedShards(), equalTo(0)); } } - - public static Tuple randomIncludeCCSMetadata() { - return switch (randomIntBetween(1, 3)) { - case 1 -> new Tuple<>(Boolean.TRUE, Boolean.TRUE); - case 2 -> new Tuple<>(Boolean.FALSE, Boolean.FALSE); - case 3 -> new Tuple<>(null, Boolean.FALSE); - default -> throw new AssertionError("should not get here"); - }; - } - - public static class LocalStateEnrich extends LocalStateCompositeXPackPlugin { - - public LocalStateEnrich(final Settings settings, final Path configPath) throws Exception { - super(settings, configPath); - - plugins.add(new EnrichPlugin(settings) { - @Override - protected XPackLicenseState getLicenseState() { - return this.getLicenseState(); - } - }); - } - - public static class EnrichTransportXPackInfoAction extends TransportXPackInfoAction { - @Inject - public EnrichTransportXPackInfoAction( - TransportService transportService, - ActionFilters actionFilters, - LicenseService licenseService, - NodeClient client - ) { - super(transportService, actionFilters, licenseService, client); - } - - @Override - protected List> infoActions() { - return Collections.singletonList(XPackInfoFeatureAction.ENRICH); - } - } - - @Override - protected Class> getInfoAction() { - return EnrichTransportXPackInfoAction.class; - } - } } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueriesWithInvalidLicenseIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueriesWithInvalidLicenseIT.java new file mode 100644 index 0000000000000..1ed42b696d65e --- /dev/null +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueriesWithInvalidLicenseIT.java @@ -0,0 +1,203 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.action; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.core.Tuple; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.xpack.esql.plan.logical.Enrich; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.Locale; +import java.util.Set; + +import static org.elasticsearch.xpack.esql.EsqlTestUtils.getValuesList; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; + +public class CrossClustersQueriesWithInvalidLicenseIT extends AbstractEnrichBasedCrossClusterTestCase { + + private static final String LICENSE_ERROR_MESSAGE = "A valid Enterprise license is required to run ES|QL cross-cluster searches."; + + @Override + protected Collection> nodePlugins(String clusterAlias) { + List> plugins = new ArrayList<>(super.nodePlugins(clusterAlias)); + plugins.add(EsqlPluginWithNonEnterpriseOrExpiredLicense.class); // key plugin for the test + return plugins; + } + + public void testBasicCrossClusterQuery() { + Tuple includeCCSMetadata = randomIncludeCCSMetadata(); + Boolean requestIncludeMeta = includeCCSMetadata.v1(); + ElasticsearchStatusException e = expectThrows( + ElasticsearchStatusException.class, + () -> runQuery("FROM *,*:* | LIMIT 5", requestIncludeMeta) + ); + assertThat(e.getMessage(), containsString(LICENSE_ERROR_MESSAGE)); + } + + public void testMetadataCrossClusterQuery() { + Tuple includeCCSMetadata = randomIncludeCCSMetadata(); + Boolean requestIncludeMeta = includeCCSMetadata.v1(); + ElasticsearchStatusException e = expectThrows( + ElasticsearchStatusException.class, + () -> runQuery("FROM events,*:* METADATA _index | SORT _index", requestIncludeMeta) + ); + assertThat(e.getMessage(), containsString(LICENSE_ERROR_MESSAGE)); + } + + public void testQueryAgainstNonMatchingClusterWildcardPattern() { + Tuple includeCCSMetadata = randomIncludeCCSMetadata(); + Boolean requestIncludeMeta = includeCCSMetadata.v1(); + boolean responseExpectMeta = includeCCSMetadata.v2(); + + // since this wildcarded expression does not resolve to a valid remote cluster, it is not considered + // a cross-cluster search and thus should not throw a license error + String q = "FROM xremote*:events"; + { + String limit1 = q + " | STATS count(*)"; + try (EsqlQueryResponse resp = runQuery(limit1, requestIncludeMeta)) { + assertThat(resp.columns().size(), equalTo(1)); + EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); + assertThat(executionInfo.isCrossClusterSearch(), is(false)); + assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); + } + + String limit0 = q + " | LIMIT 0"; + try (EsqlQueryResponse resp = runQuery(limit0, requestIncludeMeta)) { + assertThat(resp.columns().size(), equalTo(1)); + assertThat(getValuesList(resp).size(), equalTo(0)); + EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); + assertThat(executionInfo.isCrossClusterSearch(), is(false)); + assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); + } + } + } + + public void testCCSWithLimit0() { + Tuple includeCCSMetadata = randomIncludeCCSMetadata(); + Boolean requestIncludeMeta = includeCCSMetadata.v1(); + + // local only query does not need a valid Enterprise or Trial license + try (EsqlQueryResponse resp = runQuery("FROM 
events | LIMIT 0", requestIncludeMeta)) { + EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); + assertNotNull(executionInfo); + assertThat(executionInfo.isCrossClusterSearch(), is(false)); + assertThat(executionInfo.overallTook().millis(), greaterThanOrEqualTo(0L)); + } + + // cross-cluster searches should fail with license error + String q = randomFrom("FROM events,c1:* | LIMIT 0", "FROM c1:* | LIMIT 0"); + ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> runQuery(q, requestIncludeMeta)); + assertThat(e.getMessage(), containsString(LICENSE_ERROR_MESSAGE)); + } + + public void testSearchesWhereNonExistentClusterIsSpecified() { + Tuple includeCCSMetadata = randomIncludeCCSMetadata(); + Boolean requestIncludeMeta = includeCCSMetadata.v1(); + boolean responseExpectMeta = includeCCSMetadata.v2(); + + // this one query should be allowed since x* does not resolve to any known remote cluster + try (EsqlQueryResponse resp = runQuery("FROM events,x*:no_such_index* | STATS count(*)", requestIncludeMeta)) { + EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); + List> values = getValuesList(resp); + assertThat(values, hasSize(1)); + + assertNotNull(executionInfo); + assertThat(executionInfo.clusterAliases(), equalTo(Set.of(LOCAL_CLUSTER))); + assertThat(executionInfo.isCrossClusterSearch(), is(false)); + assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); + // since this not a CCS, only the overall took time in the EsqlExecutionInfo matters + assertThat(executionInfo.overallTook().millis(), greaterThanOrEqualTo(0L)); + } + + ElasticsearchStatusException e = expectThrows( + ElasticsearchStatusException.class, + () -> runQuery("FROM events,no_such_cluster:no_such_index* | STATS count(*)", requestIncludeMeta) + ); + // with a valid license this would throw "no such remote cluster" exception, but without a valid license, should get a license error + assertThat(e.getMessage(), containsString(LICENSE_ERROR_MESSAGE)); + } + + public void testEnrichWithHostsPolicy() { + // local-only queries do not need an Enterprise or Trial license + for (var mode : Enrich.Mode.values()) { + String query = "FROM events | eval ip= TO_STR(host) | " + enrichHosts(mode) + " | stats c = COUNT(*) by os | SORT os"; + try (EsqlQueryResponse resp = runQuery(query, null)) { + List> rows = getValuesList(resp); + assertThat( + rows, + equalTo( + List.of( + List.of(2L, "Android"), + List.of(1L, "Linux"), + List.of(1L, "MacOS"), + List.of(4L, "Windows"), + Arrays.asList(1L, (String) null) + ) + ) + ); + assertFalse(resp.getExecutionInfo().isCrossClusterSearch()); + } + } + + // cross-cluster query should fail due to not having valid Enterprise or Trial license + Tuple includeCCSMetadata = randomIncludeCCSMetadata(); + Boolean requestIncludeMeta = includeCCSMetadata.v1(); + + for (var mode : Enrich.Mode.values()) { + String query = "FROM *:events | eval ip= TO_STR(host) | " + enrichHosts(mode) + " | stats c = COUNT(*) by os | SORT os"; + ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> runQuery(query, requestIncludeMeta)); + assertThat(e.getMessage(), containsString("A valid Enterprise license is required to run ES|QL cross-cluster searches.")); + } + + for (var mode : Enrich.Mode.values()) { + String query = "FROM *:events,events | eval ip= TO_STR(host) | " + enrichHosts(mode) + " | stats c = COUNT(*) by os | SORT os"; + ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> 
runQuery(query, requestIncludeMeta)); + assertThat(e.getMessage(), containsString("A valid Enterprise license is required to run ES|QL cross-cluster searches.")); + } + } + + public void testAggThenEnrichRemote() { + String query = String.format(Locale.ROOT, """ + FROM *:events,events + | eval ip= TO_STR(host) + | %s + | stats c = COUNT(*) by os + | %s + | sort vendor + """, enrichHosts(Enrich.Mode.ANY), enrichVendors(Enrich.Mode.REMOTE)); + var error = expectThrows(ElasticsearchStatusException.class, () -> runQuery(query, randomBoolean()).close()); + // with a valid license this would fail with "ENRICH with remote policy can't be executed after STATS", so ensure here + // that the license error is detected first and returned rather than a VerificationException + assertThat(error.getMessage(), containsString(LICENSE_ERROR_MESSAGE)); + } + + public void testEnrichCoordinatorThenEnrichRemote() { + String query = String.format(Locale.ROOT, """ + FROM *:events,events + | eval ip= TO_STR(host) + | %s + | %s + | sort vendor + """, enrichHosts(Enrich.Mode.COORDINATOR), enrichVendors(Enrich.Mode.REMOTE)); + var error = expectThrows(ElasticsearchStatusException.class, () -> runQuery(query, randomBoolean()).close()); + assertThat( + error.getMessage(), + // with a valid license the error is "ENRICH with remote policy can't be executed after another ENRICH with coordinator policy", + // so ensure here that the license error is detected first and returned rather than a VerificationException + containsString(LICENSE_ERROR_MESSAGE) + ); + } +} diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java index 46bbad5551e6b..347ef419cab9b 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java @@ -32,7 +32,6 @@ import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.esql.VerificationException; -import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; import java.io.IOException; @@ -73,13 +72,13 @@ protected List remoteClusterAlias() { @Override protected Map skipUnavailableForRemoteClusters() { - return Map.of(REMOTE_CLUSTER_1, randomBoolean()); + return Map.of(REMOTE_CLUSTER_1, randomBoolean(), REMOTE_CLUSTER_2, randomBoolean()); } @Override protected Collection> nodePlugins(String clusterAlias) { List> plugins = new ArrayList<>(super.nodePlugins(clusterAlias)); - plugins.add(EsqlPlugin.class); + plugins.add(EsqlPluginWithEnterpriseOrTrialLicense.class); plugins.add(InternalExchangePlugin.class); return plugins; } @@ -184,7 +183,7 @@ public void testSuccessfulPathways() { } public void testSearchesAgainstNonMatchingIndicesWithLocalOnly() { - Map testClusterInfo = setupClusters(2); + Map testClusterInfo = setupTwoClusters(); String localIndex = (String) testClusterInfo.get("local.index"); { @@ -905,7 +904,7 @@ public void testSearchesWhereNonExistentClusterIsSpecifiedWithWildcards() { // cluster-foo* matches nothing and so should not be present in the EsqlExecutionInfo try ( EsqlQueryResponse resp = runQuery( - "from logs-*,no_such_index*,cluster-a:no_such_index*,cluster-foo*:* | stats sum (v)", + "FROM 
logs-*,no_such_index*,cluster-a:no_such_index*,cluster-foo*:* | STATS sum (v)", requestIncludeMeta ) ) { @@ -1009,7 +1008,7 @@ public void testMetadataIndex() { try ( EsqlQueryResponse resp = runQuery( - "FROM logs*,*:logs* METADATA _index | stats sum(v) by _index | sort _index", + Strings.format("FROM logs*,%s:logs* METADATA _index | stats sum(v) by _index | sort _index", REMOTE_CLUSTER_1), requestIncludeMeta ) ) { @@ -1091,7 +1090,7 @@ public void testProfile() { final int remoteOnlyProfiles; { EsqlQueryRequest request = EsqlQueryRequest.syncEsqlQueryRequest(); - request.query("FROM *:logs* | stats sum(v)"); + request.query("FROM c*:logs* | stats sum(v)"); request.pragmas(pragmas); request.profile(true); try (EsqlQueryResponse resp = runQuery(request)) { @@ -1124,7 +1123,7 @@ public void testProfile() { final int allProfiles; { EsqlQueryRequest request = EsqlQueryRequest.syncEsqlQueryRequest(); - request.query("FROM logs*,*:logs* | stats total = sum(v)"); + request.query("FROM logs*,c*:logs* | stats total = sum(v)"); request.pragmas(pragmas); request.profile(true); try (EsqlQueryResponse resp = runQuery(request)) { @@ -1169,7 +1168,7 @@ public void testWarnings() throws Exception { int remoteNumShards = (Integer) testClusterInfo.get("remote.num_shards"); EsqlQueryRequest request = EsqlQueryRequest.syncEsqlQueryRequest(); - request.query("FROM logs*,*:logs* | EVAL ip = to_ip(id) | STATS total = sum(v) by ip | LIMIT 10"); + request.query("FROM logs*,c*:logs* | EVAL ip = to_ip(id) | STATS total = sum(v) by ip | LIMIT 10"); InternalTestCluster cluster = cluster(LOCAL_CLUSTER); String node = randomFrom(cluster.getNodeNames()); CountDownLatch latch = new CountDownLatch(1); diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlPluginWithEnterpriseOrTrialLicense.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlPluginWithEnterpriseOrTrialLicense.java new file mode 100644 index 0000000000000..34d09fc541572 --- /dev/null +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlPluginWithEnterpriseOrTrialLicense.java @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.action; + +import org.elasticsearch.license.License; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.license.internal.XPackLicenseStatus; +import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; + +import static org.elasticsearch.test.ESTestCase.randomFrom; + +/** + * In IT tests, use this instead of the EsqlPlugin in order to use ES|QL features + * that require an Enterprise (or Trial) license.
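+ *
+ * <p>Editor's sketch, not part of this change, illustrating the assumption the stub
+ * below relies on: a TRIAL mode license passes every minimum-mode check, so a state
+ * built like this satisfies the ENTERPRISE-level gate on cross-cluster searches:
+ * <pre>
+ * XPackLicenseState state = new XPackLicenseState(
+ *     () -> System.currentTimeMillis(),
+ *     new XPackLicenseStatus(License.OperationMode.TRIAL, true, "")
+ * );
+ * assert state.isAllowedByLicense(License.OperationMode.ENTERPRISE); // true
+ * </pre>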
+ */ +public class EsqlPluginWithEnterpriseOrTrialLicense extends EsqlPlugin { + protected XPackLicenseState getLicenseState() { + License.OperationMode operationMode = randomFrom(License.OperationMode.ENTERPRISE, License.OperationMode.TRIAL); + return new XPackLicenseState(() -> System.currentTimeMillis(), new XPackLicenseStatus(operationMode, true, "Test license expired")); + } +} diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlPluginWithNonEnterpriseOrExpiredLicense.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlPluginWithNonEnterpriseOrExpiredLicense.java new file mode 100644 index 0000000000000..46c3f3f6204cd --- /dev/null +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlPluginWithNonEnterpriseOrExpiredLicense.java @@ -0,0 +1,47 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.action; + +import org.elasticsearch.license.License; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.license.internal.XPackLicenseStatus; +import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; + +import static org.elasticsearch.test.ESTestCase.randomBoolean; +import static org.elasticsearch.test.ESTestCase.randomFrom; + +/** + * In IT tests, use this instead of the EsqlPlugin in order to test ES|QL features + * using either: + * - an active (non-expired) basic, standard, missing, gold or platinum Elasticsearch license, OR + * - an expired enterprise or trial license + */ +public class EsqlPluginWithNonEnterpriseOrExpiredLicense extends EsqlPlugin { + protected XPackLicenseState getLicenseState() { + License.OperationMode operationMode; + boolean active; + if (randomBoolean()) { + operationMode = randomFrom( + License.OperationMode.PLATINUM, + License.OperationMode.GOLD, + License.OperationMode.BASIC, + License.OperationMode.MISSING, + License.OperationMode.STANDARD + ); + active = true; + } else { + operationMode = randomFrom(License.OperationMode.ENTERPRISE, License.OperationMode.TRIAL); + active = false; // expired + } + + return new XPackLicenseState( + () -> System.currentTimeMillis(), + new XPackLicenseStatus(operationMode, active, "Test license expired") + ); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index d9670b256da17..2eadb220c8b90 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -189,6 +189,9 @@ public enum Cap { */ ST_DISTANCE, + /** Support for function {@code ST_EXTENT_AGG}. */ + ST_EXTENT_AGG, + /** * Fix determination of CRS types in spatial functions when folding. */ @@ -367,6 +370,11 @@ public enum Cap { */ DATE_TRUNC_DATE_NANOS(), + /** + * Support date nanos values as the field argument to bucket + */ + DATE_NANOS_BUCKET(), + /** * support aggregations on date nanos */ @@ -422,6 +430,10 @@ public enum Cap { */ CATEGORIZE_V5, + /** + * Support for multiple groupings in "CATEGORIZE".
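+ * For example (editor's illustration; the index and field names are hypothetical),
+ * {@code FROM logs | STATS COUNT(*) BY CATEGORIZE(message), level} is now accepted,
+ * as long as CATEGORIZE is only used in the first grouping expression.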
+ */ + CATEGORIZE_MULTIPLE_GROUPINGS, /** * QSTR function */ @@ -538,7 +550,7 @@ public enum Cap { /** * LOOKUP JOIN */ - JOIN_LOOKUP_V4(Build.current().isSnapshot()), + JOIN_LOOKUP_V5(Build.current().isSnapshot()), /** * Fix for https://github.com/elastic/elasticsearch/issues/117054 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java index c805adf5d5a57..f01cc265e330b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java @@ -316,11 +316,15 @@ private static void checkAggregate(LogicalPlan p, Set failures) { private static void checkCategorizeGrouping(Aggregate agg, Set failures) { // Forbid CATEGORIZE grouping function with other groupings if (agg.groupings().size() > 1) { - agg.groupings().forEach(g -> { + agg.groupings().subList(1, agg.groupings().size()).forEach(g -> { g.forEachDown( Categorize.class, categorize -> failures.add( - fail(categorize, "cannot use CATEGORIZE grouping function [{}] with multiple groupings", categorize.sourceText()) + fail( + categorize, + "CATEGORIZE grouping function [{}] can only be in the first grouping expression", + categorize.sourceText() + ) ) ); }); @@ -601,6 +605,10 @@ private void gatherMetrics(LogicalPlan plan, BitSet b) { functions.forEach(f -> metrics.incFunctionMetric(f)); } + public XPackLicenseState licenseState() { + return licenseState; + } + /** * Limit QL's comparisons to types we support. This should agree with * {@link EsqlBinaryComparison}'s checkCompatibility method diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index 3749b46879354..1ccc22eb3a6a4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -28,6 +28,7 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.Percentile; import org.elasticsearch.xpack.esql.expression.function.aggregate.Rate; import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialCentroid; +import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialExtent; import org.elasticsearch.xpack.esql.expression.function.aggregate.StdDev; import org.elasticsearch.xpack.esql.expression.function.aggregate.Sum; import org.elasticsearch.xpack.esql.expression.function.aggregate.Top; @@ -353,6 +354,7 @@ private static FunctionDefinition[][] functions() { new FunctionDefinition[] { def(SpatialCentroid.class, SpatialCentroid::new, "st_centroid_agg"), def(SpatialContains.class, SpatialContains::new, "st_contains"), + def(SpatialExtent.class, SpatialExtent::new, "st_extent_agg"), def(SpatialDisjoint.class, SpatialDisjoint::new, "st_disjoint"), def(SpatialIntersects.class, SpatialIntersects::new, "st_intersects"), def(SpatialWithin.class, SpatialWithin::new, "st_within"), @@ -414,7 +416,7 @@ private static FunctionDefinition[][] functions() { def(MvSum.class, MvSum::new, "mv_sum"), def(Split.class, Split::new, "split") }, // fulltext functions - new FunctionDefinition[] { def(Match.class, Match::new, "match"), def(QueryString.class, QueryString::new, "qstr") } }; + 
new FunctionDefinition[] { def(Match.class, bi(Match::new), "match"), def(QueryString.class, uni(QueryString::new), "qstr") } }; } @@ -424,9 +426,9 @@ private static FunctionDefinition[][] snapshotFunctions() { // The delay() function is for debug/snapshot environments only and should never be enabled in a non-snapshot build. // This is an experimental function and can be removed without notice. def(Delay.class, Delay::new, "delay"), - def(Kql.class, Kql::new, "kql"), + def(Kql.class, uni(Kql::new), "kql"), def(Rate.class, Rate::withUnresolvedTimestamp, "rate"), - def(Term.class, Term::new, "term") } }; + def(Term.class, bi(Term::new), "term") } }; } public EsqlFunctionRegistry snapshotRegistry() { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateWritables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateWritables.java index d74b5c8b386b8..db1d2a9e6f254 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateWritables.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateWritables.java @@ -25,6 +25,7 @@ public static List getNamedWriteables() { Percentile.ENTRY, Rate.ENTRY, SpatialCentroid.ENTRY, + SpatialExtent.ENTRY, StdDev.ENTRY, Sum.ENTRY, Top.ENTRY, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialAggregateFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialAggregateFunction.java index 87eec540932b1..35f99e4b648df 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialAggregateFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialAggregateFunction.java @@ -8,6 +8,9 @@ package org.elasticsearch.xpack.esql.expression.function.aggregate; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.index.mapper.MappedFieldType.FieldExtractPreference; +import org.elasticsearch.license.License; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -22,26 +25,34 @@ * select the best one. 
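* <p>Editor's note (hedged): after this change the preference is expressed as a
* {@code MappedFieldType.FieldExtractPreference} (DOC_VALUES or NONE) instead of a
* boolean, and the new {@code checkLicense} below gates shape inputs (GEO_SHAPE,
* CARTESIAN_SHAPE) behind a PLATINUM-or-better license, while point inputs remain
* available on any license.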
*/ public abstract class SpatialAggregateFunction extends AggregateFunction { - protected final boolean useDocValues; + protected final FieldExtractPreference fieldExtractPreference; - protected SpatialAggregateFunction(Source source, Expression field, Expression filter, boolean useDocValues) { + protected SpatialAggregateFunction(Source source, Expression field, Expression filter, FieldExtractPreference fieldExtractPreference) { super(source, field, filter, emptyList()); - this.useDocValues = useDocValues; + this.fieldExtractPreference = fieldExtractPreference; } - protected SpatialAggregateFunction(StreamInput in, boolean useDocValues) throws IOException { + protected SpatialAggregateFunction(StreamInput in, FieldExtractPreference fieldExtractPreference) throws IOException { super(in); - // The useDocValues field is only used on data nodes local planning, and therefor never serialized - this.useDocValues = useDocValues; + // The fieldExtractPreference field is only used during local planning on data nodes, and is therefore never serialized + this.fieldExtractPreference = fieldExtractPreference; } public abstract SpatialAggregateFunction withDocValues(); + @Override + public boolean checkLicense(XPackLicenseState state) { + return switch (field().dataType()) { + case GEO_SHAPE, CARTESIAN_SHAPE -> state.isAllowedByLicense(License.OperationMode.PLATINUM); + default -> true; + }; + } + @Override public int hashCode() { // NB: the hashcode is currently used for key generation so // to avoid clashes between aggs with the same arguments, add the class name as variation - return Objects.hash(getClass(), children(), useDocValues); + return Objects.hash(getClass(), children(), fieldExtractPreference); } @Override @@ -50,12 +61,12 @@ public boolean equals(Object obj) { SpatialAggregateFunction other = (SpatialAggregateFunction) obj; return Objects.equals(other.field(), field()) && Objects.equals(other.parameters(), parameters()) - && Objects.equals(other.useDocValues, useDocValues); + && Objects.equals(other.fieldExtractPreference, fieldExtractPreference); } return false; } - public boolean useDocValues() { - return useDocValues; + public FieldExtractPreference fieldExtractPreference() { + return fieldExtractPreference; } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroid.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroid.java index aad95c07e3492..84915d024ea82 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroid.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroid.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.aggregation.spatial.SpatialCentroidCartesianPointSourceValuesAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.spatial.SpatialCentroidGeoPointDocValuesAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.spatial.SpatialCentroidGeoPointSourceValuesAggregatorFunctionSupplier; +import org.elasticsearch.index.mapper.MappedFieldType.FieldExtractPreference; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Literal; @@ -27,6 +28,7 @@ import java.io.IOException; import java.util.List; +import static
org.elasticsearch.index.mapper.MappedFieldType.FieldExtractPreference.NONE; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions.isSpatialPoint; @@ -47,15 +49,15 @@ public class SpatialCentroid extends SpatialAggregateFunction implements ToAggre examples = @Example(file = "spatial", tag = "st_centroid_agg-airports") ) public SpatialCentroid(Source source, @Param(name = "field", type = { "geo_point", "cartesian_point" }) Expression field) { - this(source, field, Literal.TRUE, false); + this(source, field, Literal.TRUE, NONE); } - private SpatialCentroid(Source source, Expression field, Expression filter, boolean useDocValues) { - super(source, field, filter, useDocValues); + private SpatialCentroid(Source source, Expression field, Expression filter, FieldExtractPreference preference) { + super(source, field, filter, preference); } private SpatialCentroid(StreamInput in) throws IOException { - super(in, false); + super(in, NONE); } @Override @@ -65,12 +67,12 @@ public String getWriteableName() { @Override public SpatialCentroid withFilter(Expression filter) { - return new SpatialCentroid(source(), field(), filter, useDocValues); + return new SpatialCentroid(source(), field(), filter, fieldExtractPreference); } @Override public SpatialCentroid withDocValues() { - return new SpatialCentroid(source(), field(), filter(), true); + return new SpatialCentroid(source(), field(), filter(), FieldExtractPreference.DOC_VALUES); } @Override @@ -98,23 +100,16 @@ public SpatialCentroid replaceChildren(List newChildren) { @Override public AggregatorFunctionSupplier supplier(List inputChannels) { DataType type = field().dataType(); - if (useDocValues) { - // When the points are read as doc-values (eg. 
from the index), feed them into the doc-values aggregator - if (type == DataType.GEO_POINT) { - return new SpatialCentroidGeoPointDocValuesAggregatorFunctionSupplier(inputChannels); - } - if (type == DataType.CARTESIAN_POINT) { - return new SpatialCentroidCartesianPointDocValuesAggregatorFunctionSupplier(inputChannels); - } - } else { - // When the points are read as WKB from source or as point literals, feed them into the source-values aggregator - if (type == DataType.GEO_POINT) { - return new SpatialCentroidGeoPointSourceValuesAggregatorFunctionSupplier(inputChannels); - } - if (type == DataType.CARTESIAN_POINT) { - return new SpatialCentroidCartesianPointSourceValuesAggregatorFunctionSupplier(inputChannels); - } - } - throw EsqlIllegalArgumentException.illegalDataType(type); + return switch (type) { + case DataType.GEO_POINT -> switch (fieldExtractPreference) { + case DOC_VALUES -> new SpatialCentroidGeoPointDocValuesAggregatorFunctionSupplier(inputChannels); + case NONE -> new SpatialCentroidGeoPointSourceValuesAggregatorFunctionSupplier(inputChannels); + }; + case DataType.CARTESIAN_POINT -> switch (fieldExtractPreference) { + case DOC_VALUES -> new SpatialCentroidCartesianPointDocValuesAggregatorFunctionSupplier(inputChannels); + case NONE -> new SpatialCentroidCartesianPointSourceValuesAggregatorFunctionSupplier(inputChannels); + }; + default -> throw EsqlIllegalArgumentException.illegalDataType(type); + }; } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialExtent.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialExtent.java new file mode 100644 index 0000000000000..5cc1701faf13a --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialExtent.java @@ -0,0 +1,119 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ +package org.elasticsearch.xpack.esql.expression.function.aggregate; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.spatial.SpatialExtentCartesianPointDocValuesAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.spatial.SpatialExtentCartesianPointSourceValuesAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.spatial.SpatialExtentCartesianShapeAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.spatial.SpatialExtentGeoPointDocValuesAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.spatial.SpatialExtentGeoPointSourceValuesAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.spatial.SpatialExtentGeoShapeAggregatorFunctionSupplier; +import org.elasticsearch.index.mapper.MappedFieldType.FieldExtractPreference; +import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.Literal; +import org.elasticsearch.xpack.esql.core.tree.NodeInfo; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.Example; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.planner.ToAggregator; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; +import static org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions.isSpatial; + +/** + * Calculate spatial extent of all values of a field in matching documents. + */ +public final class SpatialExtent extends SpatialAggregateFunction implements ToAggregator { + public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( + Expression.class, + "SpatialExtent", + SpatialExtent::new + ); + + @FunctionInfo( + returnType = { "geo_shape", "cartesian_shape" }, + description = "Calculate the spatial extent over a field with geometry type. 
Returns a bounding box for all values of the field.", + isAggregation = true, + examples = @Example(file = "spatial", tag = "st_extent_agg-airports") + ) + public SpatialExtent( + Source source, + @Param(name = "field", type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }) Expression field + ) { + this(source, field, Literal.TRUE, FieldExtractPreference.NONE); + } + + private SpatialExtent(Source source, Expression field, Expression filter, FieldExtractPreference preference) { + super(source, field, filter, preference); + } + + private SpatialExtent(StreamInput in) throws IOException { + super(in, FieldExtractPreference.NONE); + } + + @Override + public String getWriteableName() { + return ENTRY.name; + } + + @Override + public SpatialExtent withFilter(Expression filter) { + return new SpatialExtent(source(), field(), filter, fieldExtractPreference); + } + + @Override + public org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialExtent withDocValues() { + return new SpatialExtent(source(), field(), filter(), FieldExtractPreference.DOC_VALUES); + } + + @Override + protected TypeResolution resolveType() { + return isSpatial(field(), sourceText(), DEFAULT); + } + + @Override + public DataType dataType() { + return DataType.isSpatialGeo(field().dataType()) ? DataType.GEO_SHAPE : DataType.CARTESIAN_SHAPE; + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, SpatialExtent::new, field()); + } + + @Override + public SpatialExtent replaceChildren(List newChildren) { + return new SpatialExtent(source(), newChildren.get(0)); + } + + @Override + public AggregatorFunctionSupplier supplier(List inputChannels) { + return switch (field().dataType()) { + case DataType.GEO_POINT -> switch (fieldExtractPreference) { + case DOC_VALUES -> new SpatialExtentGeoPointDocValuesAggregatorFunctionSupplier(inputChannels); + case NONE -> new SpatialExtentGeoPointSourceValuesAggregatorFunctionSupplier(inputChannels); + }; + case DataType.CARTESIAN_POINT -> switch (fieldExtractPreference) { + case DOC_VALUES -> new SpatialExtentCartesianPointDocValuesAggregatorFunctionSupplier(inputChannels); + case NONE -> new SpatialExtentCartesianPointSourceValuesAggregatorFunctionSupplier(inputChannels); + }; + // Shapes don't differentiate between source and doc values. 
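+ // (editor's note: hence a single supplier per shape type below covers both the
+ // DOC_VALUES and NONE extract preferences, unlike the point cases above)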
+ case DataType.GEO_SHAPE -> new SpatialExtentGeoShapeAggregatorFunctionSupplier(inputChannels); + case DataType.CARTESIAN_SHAPE -> new SpatialExtentCartesianShapeAggregatorFunctionSupplier(inputChannels); + default -> throw EsqlIllegalArgumentException.illegalDataType(field().dataType()); + }; + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java index 78dc05af8f342..432d2d5f07429 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java @@ -8,14 +8,21 @@ package org.elasticsearch.xpack.esql.expression.function.fulltext; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Nullability; +import org.elasticsearch.xpack.esql.core.expression.TranslationAware; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.expression.function.Function; +import org.elasticsearch.xpack.esql.core.planner.ExpressionTranslator; +import org.elasticsearch.xpack.esql.core.planner.TranslatorHandler; +import org.elasticsearch.xpack.esql.core.querydsl.query.Query; +import org.elasticsearch.xpack.esql.core.querydsl.query.TranslationAwareExpressionQuery; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import java.util.List; +import java.util.Objects; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isNotNullAndFoldable; @@ -26,13 +33,15 @@ * These functions need to be pushed down to Lucene queries to be executed - there's no Evaluator for them, but they depend on * {@link org.elasticsearch.xpack.esql.optimizer.LocalPhysicalPlanOptimizer} to rewrite them into Lucene queries.
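* <p>Editor's note (hedged): with this change a full-text function may also carry a
* pre-built {@code QueryBuilder}. When one is present, {@code asQuery} wraps it in a
* {@code TranslationAwareExpressionQuery} as-is; otherwise it falls back to the
* per-function {@code ExpressionTranslator}. The builder is only (de)serialized on
* transport versions at or after {@code ESQL_QUERY_BUILDER_IN_SEARCH_FUNCTIONS}.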
*/ -public abstract class FullTextFunction extends Function { +public abstract class FullTextFunction extends Function implements TranslationAware { private final Expression query; + private final QueryBuilder queryBuilder; - protected FullTextFunction(Source source, Expression query, List children) { + protected FullTextFunction(Source source, Expression query, List children, QueryBuilder queryBuilder) { super(source, children); this.query = query; + this.queryBuilder = queryBuilder; } @Override @@ -116,4 +125,37 @@ public Nullability nullable() { public String functionType() { return "function"; } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), queryBuilder); + } + + @Override + public boolean equals(Object obj) { + if (false == super.equals(obj)) { + return false; + } + + return Objects.equals(queryBuilder, ((FullTextFunction) obj).queryBuilder); + } + + @Override + public Query asQuery(TranslatorHandler translatorHandler) { + if (queryBuilder != null) { + return new TranslationAwareExpressionQuery(source(), queryBuilder); + } + + ExpressionTranslator translator = translator(); + return translator.translate(this, translatorHandler); + } + + public QueryBuilder queryBuilder() { + return queryBuilder; + } + + @SuppressWarnings("rawtypes") + protected abstract ExpressionTranslator translator(); + + public abstract Expression replaceQueryBuilder(QueryBuilder queryBuilder); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Kql.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Kql.java index c03902373c02e..1f7bcadd259a0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Kql.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Kql.java @@ -7,16 +7,20 @@ package org.elasticsearch.xpack.esql.expression.function.fulltext; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.planner.ExpressionTranslator; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; +import org.elasticsearch.xpack.esql.planner.EsqlExpressionTranslators; import org.elasticsearch.xpack.esql.querydsl.query.KqlQuery; import java.io.IOException; @@ -26,7 +30,7 @@ * Full text function that performs a {@link KqlQuery} . */ public class Kql extends FullTextFunction { - public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Kql", Kql::new); + public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Kql", Kql::readFrom); @FunctionInfo( returnType = "boolean", @@ -42,17 +46,30 @@ public Kql( description = "Query string in KQL query string format." 
) Expression queryString ) { - super(source, queryString, List.of(queryString)); + super(source, queryString, List.of(queryString), null); } - private Kql(StreamInput in) throws IOException { - this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class)); + public Kql(Source source, Expression queryString, QueryBuilder queryBuilder) { + super(source, queryString, List.of(queryString), queryBuilder); + } + + private static Kql readFrom(StreamInput in) throws IOException { + Source source = Source.readFrom((PlanStreamInput) in); + Expression query = in.readNamedWriteable(Expression.class); + QueryBuilder queryBuilder = null; + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_QUERY_BUILDER_IN_SEARCH_FUNCTIONS)) { + queryBuilder = in.readOptionalNamedWriteable(QueryBuilder.class); + } + return new Kql(source, query, queryBuilder); } @Override public void writeTo(StreamOutput out) throws IOException { source().writeTo(out); out.writeNamedWriteable(query()); + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_QUERY_BUILDER_IN_SEARCH_FUNCTIONS)) { + out.writeOptionalNamedWriteable(queryBuilder()); + } } @Override @@ -62,12 +79,21 @@ public String getWriteableName() { @Override public Expression replaceChildren(List newChildren) { - return new Kql(source(), newChildren.get(0)); + return new Kql(source(), newChildren.get(0), queryBuilder()); } @Override protected NodeInfo info() { - return NodeInfo.create(this, Kql::new, query()); + return NodeInfo.create(this, Kql::new, query(), queryBuilder()); } + @Override + protected ExpressionTranslator translator() { + return new EsqlExpressionTranslators.KqlFunctionTranslator(); + } + + @Override + public Expression replaceQueryBuilder(QueryBuilder queryBuilder) { + return new Kql(source(), query(), queryBuilder); + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java index 2b9a7c73a5853..0b2268fe1b022 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java @@ -8,15 +8,18 @@ package org.elasticsearch.xpack.esql.expression.function.fulltext; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.xpack.esql.capabilities.Validatable; import org.elasticsearch.xpack.esql.common.Failure; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; +import org.elasticsearch.xpack.esql.core.planner.ExpressionTranslator; import org.elasticsearch.xpack.esql.core.querydsl.query.QueryStringQuery; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -27,6 +30,7 @@ import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.AbstractConvertFunction; import 
org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; +import org.elasticsearch.xpack.esql.planner.EsqlExpressionTranslators; import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; import java.io.IOException; @@ -109,7 +113,11 @@ public Match( description = "Value to find in the provided field." ) Expression matchQuery ) { - super(source, matchQuery, List.of(field, matchQuery)); + this(source, field, matchQuery, null); + } + + public Match(Source source, Expression field, Expression matchQuery, QueryBuilder queryBuilder) { + super(source, matchQuery, List.of(field, matchQuery), queryBuilder); this.field = field; } @@ -117,7 +125,11 @@ private static Match readFrom(StreamInput in) throws IOException { Source source = Source.readFrom((PlanStreamInput) in); Expression field = in.readNamedWriteable(Expression.class); Expression query = in.readNamedWriteable(Expression.class); - return new Match(source, field, query); + QueryBuilder queryBuilder = null; + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_QUERY_BUILDER_IN_SEARCH_FUNCTIONS)) { + queryBuilder = in.readOptionalNamedWriteable(QueryBuilder.class); + } + return new Match(source, field, query, queryBuilder); } @Override @@ -125,6 +137,9 @@ public void writeTo(StreamOutput out) throws IOException { source().writeTo(out); out.writeNamedWriteable(field()); out.writeNamedWriteable(query()); + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_QUERY_BUILDER_IN_SEARCH_FUNCTIONS)) { + out.writeOptionalNamedWriteable(queryBuilder()); + } } @Override @@ -224,12 +239,12 @@ public Object queryAsObject() { @Override public Expression replaceChildren(List newChildren) { - return new Match(source(), newChildren.get(0), newChildren.get(1)); + return new Match(source(), newChildren.get(0), newChildren.get(1), queryBuilder()); } @Override protected NodeInfo info() { - return NodeInfo.create(this, Match::new, field, query()); + return NodeInfo.create(this, Match::new, field, query(), queryBuilder()); } protected TypeResolutions.ParamOrdinal queryParamOrdinal() { @@ -245,6 +260,16 @@ public String functionType() { return isOperator() ? "operator" : super.functionType(); } + @Override + protected ExpressionTranslator translator() { + return new EsqlExpressionTranslators.MatchFunctionTranslator(); + } + + @Override + public Expression replaceQueryBuilder(QueryBuilder queryBuilder) { + return new Match(source(), field, query(), queryBuilder); + } + @Override public String functionName() { return isOperator() ? 
":" : super.functionName(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryString.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryString.java index bd79661534b76..ea21411d09173 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryString.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryString.java @@ -7,10 +7,13 @@ package org.elasticsearch.xpack.esql.expression.function.fulltext; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.planner.ExpressionTranslator; import org.elasticsearch.xpack.esql.core.querydsl.query.QueryStringQuery; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -18,6 +21,7 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; +import org.elasticsearch.xpack.esql.planner.EsqlExpressionTranslators; import java.io.IOException; import java.util.List; @@ -27,7 +31,11 @@ */ public class QueryString extends FullTextFunction { - public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "QStr", QueryString::new); + public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( + Expression.class, + "QStr", + QueryString::readFrom + ); @FunctionInfo( returnType = "boolean", @@ -44,17 +52,30 @@ public QueryString( description = "Query string in Lucene query string format." 
) Expression queryString ) { - super(source, queryString, List.of(queryString)); + super(source, queryString, List.of(queryString), null); } - private QueryString(StreamInput in) throws IOException { - this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class)); + public QueryString(Source source, Expression queryString, QueryBuilder queryBuilder) { + super(source, queryString, List.of(queryString), queryBuilder); + } + + private static QueryString readFrom(StreamInput in) throws IOException { + Source source = Source.readFrom((PlanStreamInput) in); + Expression query = in.readNamedWriteable(Expression.class); + QueryBuilder queryBuilder = null; + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_QUERY_BUILDER_IN_SEARCH_FUNCTIONS)) { + queryBuilder = in.readOptionalNamedWriteable(QueryBuilder.class); + } + return new QueryString(source, query, queryBuilder); } @Override public void writeTo(StreamOutput out) throws IOException { source().writeTo(out); out.writeNamedWriteable(query()); + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_QUERY_BUILDER_IN_SEARCH_FUNCTIONS)) { + out.writeOptionalNamedWriteable(queryBuilder()); + } } @Override @@ -69,12 +90,21 @@ public String functionName() { @Override public Expression replaceChildren(List newChildren) { - return new QueryString(source(), newChildren.get(0)); + return new QueryString(source(), newChildren.get(0), queryBuilder()); } @Override protected NodeInfo info() { - return NodeInfo.create(this, QueryString::new, query()); + return NodeInfo.create(this, QueryString::new, query(), queryBuilder()); } + @Override + protected ExpressionTranslator translator() { + return new EsqlExpressionTranslators.QueryStringFunctionTranslator(); + } + + @Override + public Expression replaceQueryBuilder(QueryBuilder queryBuilder) { + return new QueryString(source(), query(), queryBuilder); + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Term.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Term.java index 125a5b02b6e1c..ff8085cd1b44b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Term.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Term.java @@ -7,15 +7,18 @@ package org.elasticsearch.xpack.esql.expression.function.fulltext; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.xpack.esql.capabilities.Validatable; import org.elasticsearch.xpack.esql.common.Failure; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; +import org.elasticsearch.xpack.esql.core.planner.ExpressionTranslator; import org.elasticsearch.xpack.esql.core.querydsl.query.TermQuery; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -23,6 +26,7 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import 
org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; +import org.elasticsearch.xpack.esql.planner.EsqlExpressionTranslators; import java.io.IOException; import java.util.List; @@ -56,7 +60,11 @@ public Term( description = "Term you wish to find in the provided field." ) Expression termQuery ) { - super(source, termQuery, List.of(field, termQuery)); + this(source, field, termQuery, null); + } + + public Term(Source source, Expression field, Expression termQuery, QueryBuilder queryBuilder) { + super(source, termQuery, List.of(field, termQuery), queryBuilder); this.field = field; } @@ -64,7 +72,11 @@ private static Term readFrom(StreamInput in) throws IOException { Source source = Source.readFrom((PlanStreamInput) in); Expression field = in.readNamedWriteable(Expression.class); Expression query = in.readNamedWriteable(Expression.class); - return new Term(source, field, query); + QueryBuilder queryBuilder = null; + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_QUERY_BUILDER_IN_SEARCH_FUNCTIONS)) { + queryBuilder = in.readOptionalNamedWriteable(QueryBuilder.class); + } + return new Term(source, field, query, queryBuilder); } @Override @@ -72,6 +84,9 @@ public void writeTo(StreamOutput out) throws IOException { source().writeTo(out); out.writeNamedWriteable(field()); out.writeNamedWriteable(query()); + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_QUERY_BUILDER_IN_SEARCH_FUNCTIONS)) { + out.writeOptionalNamedWriteable(queryBuilder()); + } } @Override @@ -101,18 +116,28 @@ public void validate(Failures failures) { @Override public Expression replaceChildren(List newChildren) { - return new Term(source(), newChildren.get(0), newChildren.get(1)); + return new Term(source(), newChildren.get(0), newChildren.get(1), queryBuilder()); } @Override protected NodeInfo info() { - return NodeInfo.create(this, Term::new, field, query()); + return NodeInfo.create(this, Term::new, field, query(), queryBuilder()); } protected TypeResolutions.ParamOrdinal queryParamOrdinal() { return SECOND; } + @Override + protected ExpressionTranslator translator() { + return new EsqlExpressionTranslators.TermFunctionTranslator(); + } + + @Override + public Expression replaceQueryBuilder(QueryBuilder queryBuilder) { + return new Term(source(), field, query(), queryBuilder); + } + public Expression field() { return field; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java index 9e40b85fd6590..347d542f5212d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java @@ -90,7 +90,7 @@ public class Bucket extends GroupingFunction implements Validatable, TwoOptional private final Expression to; @FunctionInfo( - returnType = { "double", "date" }, + returnType = { "double", "date", "date_nanos" }, description = """ Creates groups of values - buckets - out of a datetime or numeric input. The size of the buckets can either be provided directly, or chosen based on a recommended count and values range.""", @@ -169,7 +169,7 @@ public Bucket( Source source, @Param( name = "field", - type = { "integer", "long", "double", "date" }, + type = { "integer", "long", "double", "date", "date_nanos" }, description = "Numeric or date expression from which to derive buckets." 
) Expression field, @Param( @@ -241,7 +241,7 @@ public boolean foldable() { @Override public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { - if (field.dataType() == DataType.DATETIME) { + if (field.dataType() == DataType.DATETIME || field.dataType() == DataType.DATE_NANOS) { Rounding.Prepared preparedRounding; if (buckets.dataType().isWholeNumber()) { int b = ((Number) buckets.fold()).intValue(); @@ -314,8 +314,8 @@ private double pickRounding(int buckets, double from, double to) { } // supported parameter type combinations (1st, 2nd, 3rd, 4th): - // datetime, integer, string/datetime, string/datetime - // datetime, rounding/duration, -, - + // datetime/date_nanos, integer, string/datetime, string/datetime + // datetime/date_nanos, rounding/duration, -, - // numeric, integer, numeric, numeric // numeric, numeric, -, - @Override @@ -329,7 +329,7 @@ protected TypeResolution resolveType() { return TypeResolution.TYPE_RESOLVED; } - if (fieldType == DataType.DATETIME) { + if (fieldType == DataType.DATETIME || fieldType == DataType.DATE_NANOS) { TypeResolution resolution = isType( buckets, dt -> dt.isWholeNumber() || DataType.isTemporalAmount(dt), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Categorize.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Categorize.java index ded913a78bdf1..a100dd64915f1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Categorize.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Categorize.java @@ -95,7 +95,8 @@ public boolean foldable() { @Override public Nullability nullable() { - // Both nulls and empty strings result in null values + // Null strings and strings that don't produce tokens after analysis lead to null values. + // This includes empty strings, only whitespace, (hexa)decimal numbers and stopwords. 
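(Aside; the Categorize nullable() body continues directly below.) Revisiting the QueryString and Term hunks further up: both add an optional QueryBuilder to an existing wire format behind the usual transport-version gate. Reassembled from the diff for readability, since the flattening drops line breaks, the Term side reads:

```java
private static Term readFrom(StreamInput in) throws IOException {
    Source source = Source.readFrom((PlanStreamInput) in);
    Expression field = in.readNamedWriteable(Expression.class);
    Expression query = in.readNamedWriteable(Expression.class);
    QueryBuilder queryBuilder = null;
    // Only consume the new field when the sender was new enough to have written it.
    if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_QUERY_BUILDER_IN_SEARCH_FUNCTIONS)) {
        queryBuilder = in.readOptionalNamedWriteable(QueryBuilder.class);
    }
    return new Term(source, field, query, queryBuilder);
}

@Override
public void writeTo(StreamOutput out) throws IOException {
    source().writeTo(out);
    out.writeNamedWriteable(field());
    out.writeNamedWriteable(query());
    // Symmetric guard on the write side, so older nodes never see the new field.
    if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_QUERY_BUILDER_IN_SEARCH_FUNCTIONS)) {
        out.writeOptionalNamedWriteable(queryBuilder());
    }
}
```

Both guards must test the same version constant: if only one side were gated, mixed-version clusters would mis-frame the stream.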
return Nullability.TRUE; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelope.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelope.java index 934991f3a8088..ca243efcc2851 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelope.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelope.java @@ -14,6 +14,7 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.geometry.Point; import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor; +import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor.WrapLongitude; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -129,7 +130,7 @@ static BytesRef fromWellKnownBinaryGeo(BytesRef wkb) { if (geometry instanceof Point) { return wkb; } - var envelope = SpatialEnvelopeVisitor.visitGeo(geometry, true); + var envelope = SpatialEnvelopeVisitor.visitGeo(geometry, WrapLongitude.WRAP); if (envelope.isPresent()) { return UNSPECIFIED.asWkb(envelope.get()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMax.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMax.java index d6d710b175113..69eede1c5fac5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMax.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMax.java @@ -14,6 +14,7 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.geometry.Point; import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor; +import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor.WrapLongitude; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -114,7 +115,7 @@ static double fromWellKnownBinaryGeo(BytesRef wkb) { if (geometry instanceof Point point) { return point.getX(); } - var envelope = SpatialEnvelopeVisitor.visitGeo(geometry, true); + var envelope = SpatialEnvelopeVisitor.visitGeo(geometry, WrapLongitude.WRAP); if (envelope.isPresent()) { return envelope.get().getMaxX(); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMin.java index a5fa11bc11b0f..b29a547ab0af6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMin.java @@ -14,6 +14,7 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.geometry.Point; import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor; +import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor.WrapLongitude; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import 
org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -114,7 +115,7 @@ static double fromWellKnownBinaryGeo(BytesRef wkb) { if (geometry instanceof Point point) { return point.getX(); } - var envelope = SpatialEnvelopeVisitor.visitGeo(geometry, true); + var envelope = SpatialEnvelopeVisitor.visitGeo(geometry, WrapLongitude.WRAP); if (envelope.isPresent()) { return envelope.get().getMinX(); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMax.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMax.java index fbbea8e024a6b..981b500bcaef7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMax.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMax.java @@ -14,6 +14,7 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.geometry.Point; import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor; +import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor.WrapLongitude; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -114,7 +115,7 @@ static double fromWellKnownBinaryGeo(BytesRef wkb) { if (geometry instanceof Point point) { return point.getY(); } - var envelope = SpatialEnvelopeVisitor.visitGeo(geometry, true); + var envelope = SpatialEnvelopeVisitor.visitGeo(geometry, WrapLongitude.WRAP); if (envelope.isPresent()) { return envelope.get().getMaxY(); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMin.java index 1707d3b4f2fb9..882aeb30afaee 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMin.java @@ -14,6 +14,7 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.geometry.Point; import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor; +import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor.WrapLongitude; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -114,7 +115,7 @@ static double fromWellKnownBinaryGeo(BytesRef wkb) { if (geometry instanceof Point point) { return point.getY(); } - var envelope = SpatialEnvelopeVisitor.visitGeo(geometry, true); + var envelope = SpatialEnvelopeVisitor.visitGeo(geometry, WrapLongitude.WRAP); if (envelope.isPresent()) { return envelope.get().getMinY(); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/index/EsIndex.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/index/EsIndex.java index ee51a6f391a65..d3fc9e15e2e04 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/index/EsIndex.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/index/EsIndex.java @@ -25,6 +25,9 @@ public class EsIndex implements Writeable { private final Map mapping; private final Map indexNameWithModes; + /** + * Intended for tests. 
Returns an index with an empty index mode map. + */ public EsIndex(String name, Map mapping) { this(name, mapping, Map.of()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalVerifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalVerifier.java index 5e91425296822..dce828dbf192d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalVerifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalVerifier.java @@ -14,7 +14,6 @@ public final class LogicalVerifier { - private static final PlanConsistencyChecker DEPENDENCY_CHECK = new PlanConsistencyChecker<>(); public static final LogicalVerifier INSTANCE = new LogicalVerifier(); private LogicalVerifier() {} @@ -25,7 +24,7 @@ public Failures verify(LogicalPlan plan) { Failures dependencyFailures = new Failures(); plan.forEachUp(p -> { - DEPENDENCY_CHECK.checkPlan(p, dependencyFailures); + PlanConsistencyChecker.checkPlan(p, dependencyFailures); if (failures.hasFailures() == false) { p.forEachExpression(ex -> { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalVerifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalVerifier.java index 9132cf87541bb..4ec90fc1ed50a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalVerifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalVerifier.java @@ -13,7 +13,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.optimizer.rules.PlanConsistencyChecker; import org.elasticsearch.xpack.esql.plan.logical.Enrich; -import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; import org.elasticsearch.xpack.esql.plan.physical.EnrichExec; import org.elasticsearch.xpack.esql.plan.physical.FieldExtractExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; @@ -28,7 +27,6 @@ public final class PhysicalVerifier { public static final PhysicalVerifier INSTANCE = new PhysicalVerifier(); - private static final PlanConsistencyChecker DEPENDENCY_CHECK = new PlanConsistencyChecker<>(); private PhysicalVerifier() {} @@ -44,11 +42,6 @@ public Collection verify(PhysicalPlan plan) { } plan.forEachDown(p -> { - if (p instanceof AggregateExec agg) { - var exclude = Expressions.references(agg.ordinalAttributes()); - DEPENDENCY_CHECK.checkPlan(p, exclude, depFailures); - return; - } if (p instanceof FieldExtractExec fieldExtractExec) { Attribute sourceAttribute = fieldExtractExec.sourceAttribute(); if (sourceAttribute == null) { @@ -62,7 +55,7 @@ public Collection verify(PhysicalPlan plan) { ); } } - DEPENDENCY_CHECK.checkPlan(p, depFailures); + PlanConsistencyChecker.checkPlan(p, depFailures); }); if (depFailures.hasFailures()) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PlanConsistencyChecker.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PlanConsistencyChecker.java index 5101e3f73bfdf..d5bd110e8df74 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PlanConsistencyChecker.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PlanConsistencyChecker.java @@ -12,31 +12,42 @@ import org.elasticsearch.xpack.esql.core.expression.AttributeSet; import 
org.elasticsearch.xpack.esql.core.expression.NameId; import org.elasticsearch.xpack.esql.plan.QueryPlan; +import org.elasticsearch.xpack.esql.plan.logical.BinaryPlan; +import org.elasticsearch.xpack.esql.plan.physical.BinaryExec; import java.util.HashSet; import java.util.Set; import static org.elasticsearch.xpack.esql.common.Failure.fail; -public class PlanConsistencyChecker<P extends QueryPlan<P>
    > { +public class PlanConsistencyChecker { /** * Check whether a single {@link QueryPlan} produces no duplicate attributes and its children provide all of its required * {@link QueryPlan#references() references}. Otherwise, add * {@link org.elasticsearch.xpack.esql.common.Failure Failure}s to the {@link Failures} object. */ - public void checkPlan(P p, Failures failures) { - checkPlan(p, AttributeSet.EMPTY, failures); - } - - public void checkPlan(P p, AttributeSet exclude, Failures failures) { - AttributeSet refs = p.references(); - AttributeSet input = p.inputSet(); - AttributeSet missing = refs.subtract(input).subtract(exclude); - // TODO: for Joins, we should probably check if the required fields from the left child are actually in the left child, not - // just any child (and analogously for the right child). - if (missing.isEmpty() == false) { - failures.add(fail(p, "Plan [{}] optimized incorrectly due to missing references {}", p.nodeString(), missing)); + public static void checkPlan(QueryPlan p, Failures failures) { + if (p instanceof BinaryPlan binaryPlan) { + checkMissingBinary( + p, + binaryPlan.leftReferences(), + binaryPlan.left().outputSet(), + binaryPlan.rightReferences(), + binaryPlan.right().outputSet(), + failures + ); + } else if (p instanceof BinaryExec binaryExec) { + checkMissingBinary( + p, + binaryExec.leftReferences(), + binaryExec.left().outputSet(), + binaryExec.rightReferences(), + binaryExec.right().outputSet(), + failures + ); + } else { + checkMissing(p, p.references(), p.inputSet(), "missing references", failures); } Set outputAttributeNames = new HashSet<>(); @@ -49,4 +60,29 @@ public void checkPlan(P p, AttributeSet exclude, Failures failures) { } } } + + private static void checkMissingBinary( + QueryPlan plan, + AttributeSet leftReferences, + AttributeSet leftInput, + AttributeSet rightReferences, + AttributeSet rightInput, + Failures failures + ) { + checkMissing(plan, leftReferences, leftInput, "missing references from left hand side", failures); + checkMissing(plan, rightReferences, rightInput, "missing references from right hand side", failures); + } + + private static void checkMissing( + QueryPlan plan, + AttributeSet references, + AttributeSet input, + String detailErrorMessage, + Failures failures + ) { + AttributeSet missing = references.subtract(input); + if (missing.isEmpty() == false) { + failures.add(fail(plan, "Plan [{}] optimized incorrectly due to {} {}", plan.nodeString(), detailErrorMessage, missing)); + } + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineLimits.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineLimits.java index fb9d3f7e2f91e..1cacebdf27cd2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineLimits.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineLimits.java @@ -19,7 +19,6 @@ import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.plan.logical.join.Join; import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes; -import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; public final class PushDownAndCombineLimits extends OptimizerRules.OptimizerRule { @@ -63,8 +62,10 @@ public LogicalPlan rule(Limit limit) { } } } else if (limit.child() instanceof Join join) { - if (join.config().type() == 
JoinTypes.LEFT && join.right() instanceof LocalRelation) { - // This is a hash join from something like a lookup. + if (join.config().type() == JoinTypes.LEFT) { + // NOTE! This is only correct because our LEFT JOINs preserve the number of rows from the left hand side. + // This deviates from SQL semantics. In SQL, multiple matches on the right hand side lead to multiple rows in the output. + // For us, multiple matches on the right hand side are collected into multi-values. return join.replaceChildren(limit.replaceChild(join.left()), join.right()); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/BinaryPlan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/BinaryPlan.java index 91cd7f7a15840..dbd22dd297f88 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/BinaryPlan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/BinaryPlan.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.esql.plan.logical; +import org.elasticsearch.xpack.esql.core.expression.AttributeSet; import org.elasticsearch.xpack.esql.core.tree.Source; import java.util.Arrays; @@ -30,6 +31,10 @@ public LogicalPlan right() { return right; } + public abstract AttributeSet leftReferences(); + + public abstract AttributeSet rightReferences(); + @Override public final BinaryPlan replaceChildren(List newChildren) { return replaceChildren(newChildren.get(0), newChildren.get(1)); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java index 6af29fb23b3bb..a2c159e506880 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.AttributeSet; +import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -97,6 +98,16 @@ public List output() { return lazyOutput; } + @Override + public AttributeSet leftReferences() { + return Expressions.references(config().leftFields()); + } + + @Override + public AttributeSet rightReferences() { + return Expressions.references(config().rightFields()); + } + public List rightOutputFields() { AttributeSet leftInputs = left().outputSet(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/AggregateExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/AggregateExec.java index 35f45250ed270..3c2d49567813c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/AggregateExec.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/AggregateExec.java @@ -184,7 +184,9 @@ public List output() { @Override protected AttributeSet computeReferences() { - return mode.isInputPartial() ? new AttributeSet(intermediateAttributes) : Aggregate.computeReferences(aggregates, groupings); + return mode.isInputPartial() + ? 
new AttributeSet(intermediateAttributes) + : Aggregate.computeReferences(aggregates, groupings).subtract(new AttributeSet(ordinalAttributes())); } /** Returns the attributes that can be loaded from ordinals -- no explicit extraction is needed */ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/BinaryExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/BinaryExec.java index 6f200bad17a72..9a1b76205b595 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/BinaryExec.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/BinaryExec.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.plan.physical; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.esql.core.expression.AttributeSet; import org.elasticsearch.xpack.esql.core.tree.Source; import java.io.IOException; @@ -40,6 +41,10 @@ public PhysicalPlan right() { return right; } + public abstract AttributeSet leftReferences(); + + public abstract AttributeSet rightReferences(); + @Override public void writeTo(StreamOutput out) throws IOException { Source.EMPTY.writeTo(out); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/HashJoinExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/HashJoinExec.java index 5ae3702993fcb..362c83bf76213 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/HashJoinExec.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/HashJoinExec.java @@ -119,6 +119,16 @@ protected AttributeSet computeReferences() { return Expressions.references(leftFields); } + @Override + public AttributeSet leftReferences() { + return Expressions.references(leftFields); + } + + @Override + public AttributeSet rightReferences() { + return Expressions.references(rightFields); + } + @Override public HashJoinExec replaceChildren(PhysicalPlan left, PhysicalPlan right) { return new HashJoinExec(source(), left, right, matchFields, leftFields, rightFields, output); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/LookupJoinExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/LookupJoinExec.java index 26fd12447e664..2aff38993aa98 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/LookupJoinExec.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/LookupJoinExec.java @@ -119,6 +119,21 @@ protected AttributeSet computeReferences() { return Expressions.references(leftFields); } + @Override + public AttributeSet leftReferences() { + return Expressions.references(leftFields); + } + + @Override + public AttributeSet rightReferences() { + // TODO: currently it's hard coded that we add all fields from the lookup index. But the output we "officially" get from the right + // hand side is inconsistent: + // - After logical optimization, there's a FragmentExec with an EsRelation on the right hand side with all the fields. + // - After local physical optimization, there's just an EsQueryExec here, with no fields other than _doc mentioned and we don't + // insert field extractions in the plan, either. 
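(The method body resumes below with return AttributeSet.EMPTY.) This TODO is why LookupJoinExec reports an empty right-hand reference set: the consistency checker would otherwise flag the lookup fields as missing. The checker itself, from the PlanConsistencyChecker hunk further up, reassembled with generics restored on a best-effort basis (the flattened text drops them):

```java
public static void checkPlan(QueryPlan<?> p, Failures failures) {
    if (p instanceof BinaryPlan binaryPlan) {
        // Logical joins: validate each side against the output of its own child.
        checkMissingBinary(
            p,
            binaryPlan.leftReferences(),
            binaryPlan.left().outputSet(),
            binaryPlan.rightReferences(),
            binaryPlan.right().outputSet(),
            failures
        );
    } else if (p instanceof BinaryExec binaryExec) {
        // Physical joins: same per-side validation.
        checkMissingBinary(
            p,
            binaryExec.leftReferences(),
            binaryExec.left().outputSet(),
            binaryExec.rightReferences(),
            binaryExec.right().outputSet(),
            failures
        );
    } else {
        // Unary and leaf plans keep the old check: references must come from inputs.
        checkMissing(p, p.references(), p.inputSet(), "missing references", failures);
    }
    // ... the duplicate-attribute-name check follows, unchanged ...
}
```

Checking each side against its own child's output, rather than the union of both children, is what closes the gap the removed TODO described: previously a reference could be satisfied by the wrong side of a join.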
+ return AttributeSet.EMPTY; + } + @Override public LookupJoinExec replaceChildren(PhysicalPlan left, PhysicalPlan right) { return new LookupJoinExec(source(), left, right, leftFields, rightFields, addedFields); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java index 1f55e293b8e75..1918e3036e2b0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java @@ -35,6 +35,7 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.Rate; import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialAggregateFunction; import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialCentroid; +import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialExtent; import org.elasticsearch.xpack.esql.expression.function.aggregate.StdDev; import org.elasticsearch.xpack.esql.expression.function.aggregate.Sum; import org.elasticsearch.xpack.esql.expression.function.aggregate.ToPartial; @@ -66,7 +67,7 @@ final class AggregateMapper { private static final List NUMERIC = List.of("Int", "Long", "Double"); - private static final List SPATIAL = List.of("GeoPoint", "CartesianPoint"); + private static final List SPATIAL_EXTRA_CONFIGS = List.of("SourceValues", "DocValues"); /** List of all mappable ESQL agg functions (excludes surrogates like AVG = SUM/COUNT). */ private static final List> AGG_FUNCTIONS = List.of( @@ -77,6 +78,7 @@ final class AggregateMapper { Min.class, Percentile.class, SpatialCentroid.class, + SpatialExtent.class, StdDev.class, Sum.class, Values.class, @@ -89,7 +91,11 @@ final class AggregateMapper { ); /** Record of agg Class, type, and grouping (or non-grouping). */ - private record AggDef(Class aggClazz, String type, String extra, boolean grouping) {} + private record AggDef(Class aggClazz, String type, String extra, boolean grouping) { + public AggDef withoutExtra() { + return new AggDef(aggClazz, type, "", grouping); + } + } /** Map of AggDef types to intermediate named expressions. */ private static final Map> MAPPER = AGG_FUNCTIONS.stream() @@ -145,7 +151,7 @@ private static List entryForAgg(String aggAlias, AggregateFunct var aggDef = new AggDef( aggregateFunction.getClass(), dataTypeToString(aggregateFunction.field().dataType(), aggregateFunction.getClass()), - aggregateFunction instanceof SpatialCentroid ? "SourceValues" : "", + aggregateFunction instanceof SpatialAggregateFunction ? 
"SourceValues" : "", grouping ); var is = getNonNull(aggDef); @@ -154,7 +160,7 @@ private static List entryForAgg(String aggAlias, AggregateFunct /** Gets the agg from the mapper - wrapper around map::get for more informative failure.*/ private static List getNonNull(AggDef aggDef) { - var l = MAPPER.get(aggDef); + var l = MAPPER.getOrDefault(aggDef, MAPPER.get(aggDef.withoutExtra())); if (l == null) { throw new EsqlIllegalArgumentException("Cannot find intermediate state for: " + aggDef); } @@ -170,9 +176,14 @@ private static Stream, Tuple>> typeAndNames(Class types = List.of("Boolean", "Int", "Long", "Double", "Ip", "BytesRef"); } else if (clazz == Count.class) { types = List.of(""); // no extra type distinction - } else if (SpatialAggregateFunction.class.isAssignableFrom(clazz)) { - types = SPATIAL; - extraConfigs = List.of("SourceValues", "DocValues"); + } else if (clazz == SpatialCentroid.class) { + types = List.of("GeoPoint", "CartesianPoint"); + extraConfigs = SPATIAL_EXTRA_CONFIGS; + } else if (clazz == SpatialExtent.class) { + return Stream.concat( + combine(clazz, List.of("GeoPoint", "CartesianPoint"), SPATIAL_EXTRA_CONFIGS), + combine(clazz, List.of("GeoShape", "CartesianShape"), List.of("")) + ); } else if (Values.class.isAssignableFrom(clazz)) { // TODO can't we figure this out from the function itself? types = List.of("Int", "Long", "Double", "Boolean", "BytesRef"); @@ -188,6 +199,10 @@ private static Stream, Tuple>> typeAndNames(Class assert false : "unknown aggregate type " + clazz; throw new IllegalArgumentException("unknown aggregate type " + clazz); } + return combine(clazz, types, extraConfigs); + } + + private static Stream, Tuple>> combine(Class clazz, List types, List extraConfigs) { return combinations(types, extraConfigs).map(combo -> new Tuple<>(clazz, combo)); } @@ -219,6 +234,15 @@ private static List lookupIntermediateState(AggDef aggDef /** Looks up the intermediate state method for a given class, type, and grouping. */ private static MethodHandle lookup(Class clazz, String type, String extra, boolean grouping) { + try { + return lookupRetry(clazz, type, extra, grouping); + } catch (IllegalAccessException | NoSuchMethodException | ClassNotFoundException e) { + throw new EsqlIllegalArgumentException(e); + } + } + + private static MethodHandle lookupRetry(Class clazz, String type, String extra, boolean grouping) throws IllegalAccessException, + NoSuchMethodException, ClassNotFoundException { try { return MethodHandles.lookup() .findStatic( @@ -226,8 +250,14 @@ private static MethodHandle lookup(Class clazz, String type, String extra, bo "intermediateStateDesc", MethodType.methodType(List.class) ); - } catch (IllegalAccessException | NoSuchMethodException | ClassNotFoundException e) { - throw new EsqlIllegalArgumentException(e); + } catch (NoSuchMethodException ignore) { + // Retry without the extra information. 
+ return MethodHandles.lookup() + .findStatic( + Class.forName(determineAggName(clazz, type, "", grouping)), + "intermediateStateDesc", + MethodType.methodType(List.class) + ); } } @@ -301,8 +331,10 @@ private static String dataTypeToString(DataType type, Class aggClass) { case DataType.KEYWORD, DataType.IP, DataType.VERSION, DataType.TEXT, DataType.SEMANTIC_TEXT -> "BytesRef"; case GEO_POINT -> "GeoPoint"; case CARTESIAN_POINT -> "CartesianPoint"; + case GEO_SHAPE -> "GeoShape"; + case CARTESIAN_SHAPE -> "CartesianShape"; case UNSUPPORTED, NULL, UNSIGNED_LONG, SHORT, BYTE, FLOAT, HALF_FLOAT, SCALED_FLOAT, OBJECT, SOURCE, DATE_PERIOD, TIME_DURATION, - CARTESIAN_SHAPE, GEO_SHAPE, DOC_DATA_TYPE, TSID_DATA_TYPE, PARTIAL_AGG -> throw new EsqlIllegalArgumentException( + DOC_DATA_TYPE, TSID_DATA_TYPE, PARTIAL_AGG -> throw new EsqlIllegalArgumentException( "illegal agg type: " + type.typeName() ); }; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java index 7820f0f657f7f..43bbf9a5f4ff1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java @@ -16,6 +16,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; +import org.elasticsearch.xpack.esql.core.expression.TranslationAware; import org.elasticsearch.xpack.esql.core.expression.TypedAttribute; import org.elasticsearch.xpack.esql.core.expression.function.scalar.ScalarFunction; import org.elasticsearch.xpack.esql.core.expression.predicate.Range; @@ -100,7 +101,11 @@ public final class EsqlExpressionTranslators { ); public static Query toQuery(Expression e, TranslatorHandler handler) { + if (e instanceof TranslationAware ta) { + return ta.asQuery(handler); + } Query translation = null; + for (ExpressionTranslator translator : QUERY_TRANSLATORS) { translation = translator.translate(e, handler); if (translation != null) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlLicenseChecker.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlLicenseChecker.java new file mode 100644 index 0000000000000..0a52ee75de3b2 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlLicenseChecker.java @@ -0,0 +1,51 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.session; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.license.License; +import org.elasticsearch.license.LicensedFeature; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.rest.RestStatus; + +public class EsqlLicenseChecker { + + public static final LicensedFeature.Momentary CCS_FEATURE = LicensedFeature.momentary( + null, + "esql-ccs", + License.OperationMode.ENTERPRISE + ); + + /** + * Only call this method once you know the user is doing a cross-cluster query, as it will update + * the license_usage timestamp for the esql-ccs feature if the license is Enterprise (or Trial). + * @param licenseState + * @return true if the user has a license that allows ESQL CCS. + */ + public static boolean isCcsAllowed(XPackLicenseState licenseState) { + if (licenseState == null) { + return false; + } + return CCS_FEATURE.check(licenseState); + } + + /** + * @param licenseState existing license state. Need to extract info on the current installed license. + * @return ElasticsearchStatusException with an error message informing the caller what license is needed + * to run ES|QL cross-cluster searches and what license (if any) was found. + */ + public static ElasticsearchStatusException invalidLicenseForCcsException(XPackLicenseState licenseState) { + String message = "A valid Enterprise license is required to run ES|QL cross-cluster searches. License found: "; + if (licenseState == null) { + message += "none"; + } else { + message += licenseState.statusDescription(); + } + return new ElasticsearchStatusException(message, RestStatus.BAD_REQUEST); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java index 4f7c620bc8d12..83480f6651abf 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java @@ -298,6 +298,9 @@ public void analyzedPlan( .map(e -> new EnrichPolicyResolver.UnresolvedPolicy((String) e.policyName().fold(), e.mode())) .collect(Collectors.toSet()); final List indices = preAnalysis.indices; + + EsqlSessionCCSUtils.checkForCcsLicense(indices, indicesExpressionGrouper, verifier.licenseState()); + // TODO: make a separate call for lookup indices final Set targetClusters = enrichPolicyResolver.groupIndicesPerCluster( indices.stream().flatMap(t -> Arrays.stream(Strings.commaDelimitedListToStringArray(t.id().index()))).toArray(String[]::new) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSessionCCSUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSessionCCSUtils.java index 4fe2fef7e3f45..662572c466511 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSessionCCSUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSessionCCSUtils.java @@ -9,17 +9,24 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesFailure; import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.common.Strings; import org.elasticsearch.common.util.set.Sets; import 
org.elasticsearch.core.TimeValue; +import org.elasticsearch.indices.IndicesExpressionGrouper; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.transport.ConnectTransportException; +import org.elasticsearch.transport.NoSuchRemoteClusterException; import org.elasticsearch.transport.RemoteClusterAware; +import org.elasticsearch.transport.RemoteClusterService; import org.elasticsearch.transport.RemoteTransportException; import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.action.EsqlExecutionInfo; import org.elasticsearch.xpack.esql.analysis.Analyzer; +import org.elasticsearch.xpack.esql.analysis.TableInfo; import org.elasticsearch.xpack.esql.index.IndexResolution; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; @@ -255,6 +262,9 @@ static boolean missingIndicesIsFatal(String clusterAlias, EsqlExecutionInfo exec } private static boolean concreteIndexRequested(String indexExpression) { + if (Strings.isNullOrBlank(indexExpression)) { + return false; + } for (String expr : indexExpression.split(",")) { if (expr.charAt(0) == '<' || expr.startsWith("-<")) { // skip date math expressions @@ -288,4 +298,37 @@ static void updateExecutionInfoAtEndOfPlanning(EsqlExecutionInfo execInfo) { } } } + + /** + * Checks the index expression for the presence of remote clusters. If found, it will ensure that the caller + * has a valid Enterprise (or Trial) license on the querying cluster. + * @param indices index expression requested by user + * @param indicesGrouper grouper of index expressions by cluster alias + * @param licenseState license state on the querying cluster + * @throws org.elasticsearch.ElasticsearchStatusException if the license is not valid (or present) for ES|QL CCS search. 
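(Signature and body follow.) For context, the only call site is the one added to EsqlSession.analyzedPlan in the hunk further up, so the check runs during pre-analysis, before any work fans out to remote clusters:

```java
// From the EsqlSession hunk above: fail fast on licensing before resolving
// enrich policies or field caps across clusters.
EsqlSessionCCSUtils.checkForCcsLicense(indices, indicesExpressionGrouper, verifier.licenseState());
```

A hypothetical failure path (query text assumed for illustration): FROM logs*,my_remote:logs* on a cluster with only a BASIC license throws the 400 ElasticsearchStatusException built by EsqlLicenseChecker.invalidLicenseForCcsException, while the NoSuchRemoteClusterException branch in the body below preserves the more specific error for licensed clusters that name an unknown remote.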
+ */ + public static void checkForCcsLicense( + List indices, + IndicesExpressionGrouper indicesGrouper, + XPackLicenseState licenseState + ) { + for (TableInfo tableInfo : indices) { + Map groupedIndices; + try { + groupedIndices = indicesGrouper.groupIndices(IndicesOptions.DEFAULT, tableInfo.id().index()); + } catch (NoSuchRemoteClusterException e) { + if (EsqlLicenseChecker.isCcsAllowed(licenseState)) { + throw e; + } else { + throw EsqlLicenseChecker.invalidLicenseForCcsException(licenseState); + } + } + // check if it is a cross-cluster query + if (groupedIndices.size() > 1 || groupedIndices.containsKey(RemoteClusterService.LOCAL_CLUSTER_GROUP_KEY) == false) { + if (EsqlLicenseChecker.isCcsAllowed(licenseState) == false) { + throw EsqlLicenseChecker.invalidLicenseForCcsException(licenseState); + } + } + } + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index 2834e5f3f8358..c11ef8615eb72 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -263,7 +263,7 @@ public final void test() throws Throwable { ); assumeFalse( "lookup join disabled for csv tests", - testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.JOIN_LOOKUP_V4.capabilityName()) + testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.JOIN_LOOKUP_V5.capabilityName()) ); assumeFalse( "can't use TERM function in csv tests", diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTestUtils.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTestUtils.java index 4e89a09db9ed4..5e79e40b7e938 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTestUtils.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTestUtils.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.analysis; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.enrich.ResolvedEnrichPolicy; @@ -104,6 +105,11 @@ public static LogicalPlan analyze(String query, String mapping, QueryParams para return analyzer.analyze(plan); } + public static IndexResolution loadMapping(String resource, String indexName, IndexMode indexMode) { + EsIndex test = new EsIndex(indexName, EsqlTestUtils.loadMapping(resource), Map.of(indexName, indexMode)); + return IndexResolution.valid(test); + } + public static IndexResolution loadMapping(String resource, String indexName) { EsIndex test = new EsIndex(indexName, EsqlTestUtils.loadMapping(resource)); return IndexResolution.valid(test); @@ -118,7 +124,7 @@ public static IndexResolution expandedDefaultIndexResolution() { } public static IndexResolution defaultLookupResolution() { - return loadMapping("mapping-languages.json", "languages_lookup"); + return loadMapping("mapping-languages.json", "languages_lookup", IndexMode.LOOKUP); } public static EnrichResolution defaultEnrichResolution() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index 30aec707df541..cfff245b19244 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -2139,7 +2139,7 @@ public void testLookupMatchTypeWrong() { } public void testLookupJoinUnknownIndex() { - assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V4.isEnabled()); + assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V5.isEnabled()); String errorMessage = "Unknown index [foobar]"; IndexResolution missingLookupIndex = IndexResolution.invalid(errorMessage); @@ -2168,7 +2168,7 @@ public void testLookupJoinUnknownIndex() { } public void testLookupJoinUnknownField() { - assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V4.isEnabled()); + assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V5.isEnabled()); String query = "FROM test | LOOKUP JOIN languages_lookup ON last_name"; String errorMessage = "1:45: Unknown column [last_name] in right side of join"; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index 84dcdbadef9f0..4b916106165fb 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -1869,38 +1869,35 @@ public void testIntervalAsString() { ); } - public void testCategorizeSingleGrouping() { - assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V5.isEnabled()); - - query("from test | STATS COUNT(*) BY CATEGORIZE(first_name)"); - query("from test | STATS COUNT(*) BY cat = CATEGORIZE(first_name)"); + public void testCategorizeOnlyFirstGrouping() { + query("FROM test | STATS COUNT(*) BY CATEGORIZE(first_name)"); + query("FROM test | STATS COUNT(*) BY cat = CATEGORIZE(first_name)"); + query("FROM test | STATS COUNT(*) BY CATEGORIZE(first_name), emp_no"); + query("FROM test | STATS COUNT(*) BY a = CATEGORIZE(first_name), b = emp_no"); assertEquals( - "1:31: cannot use CATEGORIZE grouping function [CATEGORIZE(first_name)] with multiple groupings", - error("from test | STATS COUNT(*) BY CATEGORIZE(first_name), emp_no") + "1:39: CATEGORIZE grouping function [CATEGORIZE(first_name)] can only be in the first grouping expression", + error("FROM test | STATS COUNT(*) BY emp_no, CATEGORIZE(first_name)") ); assertEquals( - "1:39: cannot use CATEGORIZE grouping function [CATEGORIZE(first_name)] with multiple groupings", - error("FROM test | STATS COUNT(*) BY emp_no, CATEGORIZE(first_name)") + "1:55: CATEGORIZE grouping function [CATEGORIZE(last_name)] can only be in the first grouping expression", + error("FROM test | STATS COUNT(*) BY CATEGORIZE(first_name), CATEGORIZE(last_name)") ); assertEquals( - "1:35: cannot use CATEGORIZE grouping function [CATEGORIZE(first_name)] with multiple groupings", - error("FROM test | STATS COUNT(*) BY a = CATEGORIZE(first_name), b = emp_no") + "1:55: CATEGORIZE grouping function [CATEGORIZE(first_name)] can only be in the first grouping expression", + error("FROM test | STATS COUNT(*) BY CATEGORIZE(first_name), CATEGORIZE(first_name)") ); assertEquals( - "1:31: cannot use CATEGORIZE grouping function [CATEGORIZE(first_name)] with multiple groupings\n" - + "line 1:55: cannot use CATEGORIZE grouping function [CATEGORIZE(last_name)] with multiple 
groupings", - error("FROM test | STATS COUNT(*) BY CATEGORIZE(first_name), CATEGORIZE(last_name)") + "1:63: CATEGORIZE grouping function [CATEGORIZE(last_name)] can only be in the first grouping expression", + error("FROM test | STATS COUNT(*) BY CATEGORIZE(first_name), emp_no, CATEGORIZE(last_name)") ); assertEquals( - "1:31: cannot use CATEGORIZE grouping function [CATEGORIZE(first_name)] with multiple groupings", - error("FROM test | STATS COUNT(*) BY CATEGORIZE(first_name), CATEGORIZE(first_name)") + "1:63: CATEGORIZE grouping function [CATEGORIZE(first_name)] can only be in the first grouping expression", + error("FROM test | STATS COUNT(*) BY CATEGORIZE(first_name), emp_no, CATEGORIZE(first_name)") ); } public void testCategorizeNestedGrouping() { - assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V5.isEnabled()); - query("from test | STATS COUNT(*) BY CATEGORIZE(LENGTH(first_name)::string)"); assertEquals( @@ -1914,8 +1911,6 @@ public void testCategorizeNestedGrouping() { } public void testCategorizeWithinAggregations() { - assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V5.isEnabled()); - query("from test | STATS MV_COUNT(cat), COUNT(*) BY cat = CATEGORIZE(first_name)"); query("from test | STATS MV_COUNT(CATEGORIZE(first_name)), COUNT(*) BY cat = CATEGORIZE(first_name)"); query("from test | STATS MV_COUNT(CATEGORIZE(first_name)), COUNT(*) BY CATEGORIZE(first_name)"); @@ -1944,8 +1939,6 @@ public void testCategorizeWithinAggregations() { } public void testCategorizeWithFilteredAggregations() { - assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V5.isEnabled()); - query("FROM test | STATS COUNT(*) WHERE first_name == \"John\" BY CATEGORIZE(last_name)"); query("FROM test | STATS COUNT(*) WHERE last_name == \"Doe\" BY CATEGORIZE(last_name)"); @@ -1971,7 +1964,7 @@ public void testSortByAggregate() { } public void testLookupJoinDataTypeMismatch() { - assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V4.isEnabled()); + assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V5.isEnabled()); query("FROM test | EVAL language_code = languages | LOOKUP JOIN languages_lookup ON language_code"); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/RectangleMatcher.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/RectangleMatcher.java new file mode 100644 index 0000000000000..48fbc9c8e0378 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/RectangleMatcher.java @@ -0,0 +1,61 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression; + +import org.elasticsearch.compute.aggregation.spatial.PointType; +import org.elasticsearch.geometry.Rectangle; +import org.hamcrest.Description; +import org.hamcrest.Matchers; +import org.hamcrest.TypeSafeMatcher; + +/** + * Example usage: assertThat(actualRectangle, RectangleMatcher.closeTo(expectedRectangle, 0.0001, PointType.CARTESIAN));, or it + * can be used as a parameter to {@link WellKnownBinaryBytesRefMatcher}. 
+ */ +public class RectangleMatcher extends TypeSafeMatcher { + private final Rectangle r; + private final PointType pointType; + private final double error; + + public static TypeSafeMatcher closeTo(Rectangle r, double error, PointType pointType) { + return new RectangleMatcher(r, error, pointType); + } + + private RectangleMatcher(Rectangle r, double error, PointType pointType) { + this.r = r; + this.pointType = pointType; + this.error = error; + } + + @Override + protected boolean matchesSafely(Rectangle other) { + // For geo bounds, longitude of (-180, 180) and (epsilon, -epsilon) are actually very close, since both encompass the entire globe. + boolean wrapAroundWorkAround = pointType == PointType.GEO && r.getMinX() >= r.getMaxX(); + boolean matchMinX = Matchers.closeTo(r.getMinX(), error).matches(other.getMinX()) + || (wrapAroundWorkAround && Matchers.closeTo(r.getMinX() - 180, error).matches(other.getMinX())) + || (wrapAroundWorkAround && Matchers.closeTo(r.getMinX(), error).matches(other.getMinX() - 180)); + boolean matchMaxX = Matchers.closeTo(r.getMaxX(), error).matches(other.getMaxX()) + || (wrapAroundWorkAround && Matchers.closeTo(r.getMaxX() + 180, error).matches(other.getMaxX())) + || (wrapAroundWorkAround && Matchers.closeTo(r.getMaxX(), error).matches(other.getMaxX() + 180)); + + return matchMinX + && matchMaxX + && Matchers.closeTo(r.getMaxY(), error).matches(other.getMaxY()) + && Matchers.closeTo(r.getMinY(), error).matches(other.getMinY()); + } + + @Override + public void describeMismatchSafely(Rectangle rectangle, Description description) { + description.appendText("was ").appendValue(rectangle); + } + + @Override + public void describeTo(Description description) { + description.appendValue(" " + r); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/WellKnownBinaryBytesRefMatcher.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/WellKnownBinaryBytesRefMatcher.java new file mode 100644 index 0000000000000..535bb820458cd --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/WellKnownBinaryBytesRefMatcher.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.geometry.utils.GeometryValidator; +import org.elasticsearch.geometry.utils.WellKnownBinary; +import org.hamcrest.Description; +import org.hamcrest.Matcher; +import org.hamcrest.TypeSafeMatcher; + +/** A wrapper for matching geometries encoded as WKB in a BytesRef. 
*/ +public class WellKnownBinaryBytesRefMatcher extends TypeSafeMatcher { + private final Matcher matcher; + + public WellKnownBinaryBytesRefMatcher(Matcher matcher) { + this.matcher = matcher; + } + + @Override + public boolean matchesSafely(BytesRef bytesRef) { + return matcher.matches(fromBytesRef(bytesRef)); + } + + @Override + public void describeMismatchSafely(BytesRef bytesRef, Description description) { + matcher.describeMismatch(fromBytesRef(bytesRef), description); + } + + @SuppressWarnings("unchecked") + private G fromBytesRef(BytesRef bytesRef) { + return (G) WellKnownBinary.fromWKB(GeometryValidator.NOOP, false /* coerce */, bytesRef.bytes, bytesRef.offset, bytesRef.length); + } + + @Override + public void describeTo(Description description) { + matcher.describeTo(description); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java index df1675ba22568..c086245d6fd61 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java @@ -277,9 +277,11 @@ private void evaluate(Expression evaluableExpression) { } private void resolveExpression(Expression expression, Consumer onAggregator, Consumer onEvaluableExpression) { - logger.info( - "Test Values: " + testCase.getData().stream().map(TestCaseSupplier.TypedData::toString).collect(Collectors.joining(",")) - ); + String valuesString = testCase.getData().stream().map(TestCaseSupplier.TypedData::toString).collect(Collectors.joining(",")); + if (valuesString.length() > 200) { + valuesString = valuesString.substring(0, 200) + "..."; + } + logger.info("Test Values: " + valuesString); if (testCase.getExpectedTypeError() != null) { assertTypeResolutionFailure(expression); return; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/MultiRowTestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/MultiRowTestCaseSupplier.java index 775ca45bfa124..bb0d2e57c3440 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/MultiRowTestCaseSupplier.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/MultiRowTestCaseSupplier.java @@ -9,9 +9,11 @@ import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.geo.GeometryTestUtils; import org.elasticsearch.geo.ShapeTestUtils; +import org.elasticsearch.geometry.Geometry; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.versionfield.Version; @@ -19,11 +21,11 @@ import java.math.BigInteger; import java.util.ArrayList; import java.util.List; +import java.util.function.Function; import java.util.function.Supplier; import static org.elasticsearch.test.ESTestCase.randomBoolean; import static org.elasticsearch.test.ESTestCase.randomList; -import static org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes.CARTESIAN; import static org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes.GEO; import static 
org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier.TypedDataSupplier; @@ -263,9 +265,7 @@ public static List dateCases(int minRows, int maxRows) { } /** - * * Generate cases for {@link DataType#DATE_NANOS}. - * */ public static List dateNanosCases(int minRows, int maxRows) { List cases = new ArrayList<>(); @@ -370,53 +370,58 @@ public static List versionCases(int minRows, int maxRows) { return cases; } - public static List geoPointCases(int minRows, int maxRows, boolean withAltitude) { - List cases = new ArrayList<>(); + public enum IncludingAltitude { + YES, + NO + } - addSuppliers( - cases, + public static List geoPointCases(int minRows, int maxRows, IncludingAltitude withAltitude) { + return spatialCases(minRows, maxRows, withAltitude, "geo_point", DataType.GEO_POINT, GeometryTestUtils::randomPoint); + } + + public static List geoShapeCasesWithoutCircle(int minRows, int maxRows, IncludingAltitude includingAltitude) { + return spatialCases( minRows, maxRows, - "", - DataType.GEO_POINT, - () -> GEO.asWkb(GeometryTestUtils.randomPoint(false)) + includingAltitude, + "geo_shape", + DataType.GEO_SHAPE, + b -> GeometryTestUtils.randomGeometryWithoutCircle(0, b) ); - - if (withAltitude) { - addSuppliers( - cases, - minRows, - maxRows, - "", - DataType.GEO_POINT, - () -> GEO.asWkb(GeometryTestUtils.randomPoint(true)) - ); - } - - return cases; } - public static List cartesianPointCases(int minRows, int maxRows, boolean withAltitude) { - List cases = new ArrayList<>(); - - addSuppliers( - cases, + public static List cartesianShapeCasesWithoutCircle(int minRows, int maxRows, IncludingAltitude includingAltitude) { + return spatialCases( minRows, maxRows, - "", - DataType.CARTESIAN_POINT, - () -> CARTESIAN.asWkb(ShapeTestUtils.randomPoint(false)) + includingAltitude, + "geo_shape", + DataType.CARTESIAN_SHAPE, + b -> ShapeTestUtils.randomGeometryWithoutCircle(0, b) ); + } - if (withAltitude) { - addSuppliers( - cases, - minRows, - maxRows, - "", - DataType.CARTESIAN_POINT, - () -> CARTESIAN.asWkb(ShapeTestUtils.randomPoint(true)) - ); + public static List cartesianPointCases(int minRows, int maxRows, IncludingAltitude includingAltitude) { + return spatialCases(minRows, maxRows, includingAltitude, "cartesian_point", DataType.CARTESIAN_POINT, ShapeTestUtils::randomPoint); + } + + @SuppressWarnings("fallthrough") + private static List spatialCases( + int minRows, + int maxRows, + IncludingAltitude includingAltitude, + String name, + DataType type, + Function gen + ) { + List cases = new ArrayList<>(); + + switch (includingAltitude) { + case YES: + addSuppliers(cases, minRows, maxRows, Strings.format("", name), type, () -> GEO.asWkb(gen.apply(true))); + // Explicit fallthrough: always generate a case without altitude. 
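(The NO case follows.) One caveat on the two addSuppliers calls in this switch: the flattening that produced this document drops angle-bracketed text, which is why the description arguments render as empty strings above and below. A best-effort reconstruction of what they likely are:

```java
// Reconstruction; the "<...>" description strings were lost in extraction.
addSuppliers(cases, minRows, maxRows, Strings.format("<%s with altitude>", name), type, () -> GEO.asWkb(gen.apply(true)));
// Explicit fallthrough: always generate a case without altitude.
addSuppliers(cases, minRows, maxRows, Strings.format("<%s>", name), type, () -> GEO.asWkb(gen.apply(false)));
```

The @SuppressWarnings("fallthrough") on the method documents that the missing break is deliberate: YES is a strict superset of NO.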
+ case NO: + addSuppliers(cases, minRows, maxRows, Strings.format("", name), type, () -> GEO.asWkb(gen.apply(false))); } return cases; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountTests.java index 131072acff870..0485714959f63 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractAggregationTestCase; import org.elasticsearch.xpack.esql.expression.function.MultiRowTestCaseSupplier; +import org.elasticsearch.xpack.esql.expression.function.MultiRowTestCaseSupplier.IncludingAltitude; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.math.BigInteger; @@ -44,8 +45,8 @@ public static Iterable parameters() { MultiRowTestCaseSupplier.booleanCases(1, 1000), MultiRowTestCaseSupplier.ipCases(1, 1000), MultiRowTestCaseSupplier.versionCases(1, 1000), - MultiRowTestCaseSupplier.geoPointCases(1, 1000, true), - MultiRowTestCaseSupplier.cartesianPointCases(1, 1000, true), + MultiRowTestCaseSupplier.geoPointCases(1, 1000, IncludingAltitude.YES), + MultiRowTestCaseSupplier.cartesianPointCases(1, 1000, IncludingAltitude.YES), MultiRowTestCaseSupplier.stringCases(1, 1000, DataType.KEYWORD), MultiRowTestCaseSupplier.stringCases(1, 1000, DataType.TEXT), MultiRowTestCaseSupplier.stringCases(1, 1000, DataType.SEMANTIC_TEXT) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroidTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroidTests.java index 15ea029a05554..b92b32aa7ad09 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroidTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroidTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.xpack.esql.expression.function.AbstractAggregationTestCase; import org.elasticsearch.xpack.esql.expression.function.FunctionName; import org.elasticsearch.xpack.esql.expression.function.MultiRowTestCaseSupplier; +import org.elasticsearch.xpack.esql.expression.function.MultiRowTestCaseSupplier.IncludingAltitude; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.hamcrest.BaseMatcher; import org.hamcrest.Description; @@ -41,8 +42,8 @@ public SpatialCentroidTests(@Name("TestCase") Supplier parameters() { var suppliers = Stream.of( - MultiRowTestCaseSupplier.geoPointCases(1, 1000, true), - MultiRowTestCaseSupplier.cartesianPointCases(1, 1000, true) + MultiRowTestCaseSupplier.geoPointCases(1, 1000, IncludingAltitude.NO), + MultiRowTestCaseSupplier.cartesianPointCases(1, 1000, IncludingAltitude.NO) ).flatMap(List::stream).map(SpatialCentroidTests::makeSupplier).toList(); // The withNoRowsExpectingNull() cases don't work here, as this aggregator doesn't return nulls. 
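Between the two test-file diffs, a note on the signature change they both consume: replacing the boolean altitude flag with the IncludingAltitude enum makes the call sites self-describing.

```java
// Before: the bare boolean gives no hint what 'true' means at the call site.
MultiRowTestCaseSupplier.geoPointCases(1, 1000, true);

// After: the intent reads directly at the call site.
MultiRowTestCaseSupplier.geoPointCases(1, 1000, IncludingAltitude.YES);       // CountTests keeps altitude
MultiRowTestCaseSupplier.cartesianPointCases(1, 1000, IncludingAltitude.NO);  // centroid/extent tests drop it
```

Note that SpatialCentroidTests flips from true to NO, so that hunk is a behavior change for the test (altitude cases are no longer generated), not merely a rename.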
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialExtentTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialExtentTests.java new file mode 100644 index 0000000000000..a1faa537ba052 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialExtentTests.java @@ -0,0 +1,102 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.aggregate; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.aggregation.spatial.PointType; +import org.elasticsearch.geometry.Rectangle; +import org.elasticsearch.geometry.utils.GeometryValidator; +import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor; +import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor.WrapLongitude; +import org.elasticsearch.geometry.utils.WellKnownBinary; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.RectangleMatcher; +import org.elasticsearch.xpack.esql.expression.WellKnownBinaryBytesRefMatcher; +import org.elasticsearch.xpack.esql.expression.function.AbstractAggregationTestCase; +import org.elasticsearch.xpack.esql.expression.function.FunctionName; +import org.elasticsearch.xpack.esql.expression.function.MultiRowTestCaseSupplier; +import org.elasticsearch.xpack.esql.expression.function.MultiRowTestCaseSupplier.IncludingAltitude; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; + +import java.util.List; +import java.util.function.Supplier; +import java.util.stream.Stream; + +@FunctionName("st_extent_agg") +public class SpatialExtentTests extends AbstractAggregationTestCase { + public SpatialExtentTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + var suppliers = Stream.of( + MultiRowTestCaseSupplier.geoPointCases(1, 1000, IncludingAltitude.NO), + MultiRowTestCaseSupplier.cartesianPointCases(1, 1000, IncludingAltitude.NO), + MultiRowTestCaseSupplier.geoShapeCasesWithoutCircle(1, 1000, IncludingAltitude.NO), + MultiRowTestCaseSupplier.cartesianShapeCasesWithoutCircle(1, 1000, IncludingAltitude.NO) + ).flatMap(List::stream).map(SpatialExtentTests::makeSupplier).toList(); + + // The withNoRowsExpectingNull() cases don't work here, as this aggregator doesn't return nulls. 
+ // return parameterSuppliersFromTypedDataWithDefaultChecks(suppliers); + return parameterSuppliersFromTypedData(randomizeBytesRefsOffset(suppliers)); + } + + @Override + protected Expression build(Source source, List args) { + return new SpatialExtent(source, args.get(0)); + } + + private static TestCaseSupplier makeSupplier(TestCaseSupplier.TypedDataSupplier fieldSupplier) { + return new TestCaseSupplier(List.of(fieldSupplier.type()), () -> { + PointType pointType = switch (fieldSupplier.type()) { + case DataType.CARTESIAN_POINT, DataType.CARTESIAN_SHAPE -> PointType.CARTESIAN; + case DataType.GEO_POINT, DataType.GEO_SHAPE -> PointType.GEO; + default -> throw new IllegalArgumentException("Unsupported type: " + fieldSupplier.type()); + }; + var pointVisitor = switch (pointType) { + case CARTESIAN -> new SpatialEnvelopeVisitor.CartesianPointVisitor(); + case GEO -> new SpatialEnvelopeVisitor.GeoPointVisitor(WrapLongitude.WRAP); + }; + + var fieldTypedData = fieldSupplier.get(); + DataType expectedType = DataType.isSpatialGeo(fieldTypedData.type()) ? DataType.GEO_SHAPE : DataType.CARTESIAN_SHAPE; + fieldTypedData.multiRowData() + .stream() + .map(value -> (BytesRef) value) + .map(value -> WellKnownBinary.fromWKB(GeometryValidator.NOOP, false, value.bytes, value.offset, value.length)) + .forEach(g -> g.visit(new SpatialEnvelopeVisitor(pointVisitor))); + assert pointVisitor.isValid(); + Rectangle result = pointVisitor.getResult(); + return new TestCaseSupplier.TestCase( + List.of(fieldTypedData), + "SpatialExtent[field=Attribute[channel=0]]", + expectedType, + new WellKnownBinaryBytesRefMatcher<>( + RectangleMatcher.closeTo( + new Rectangle( + // Since we use integers locally which are later decoded to doubles, all computation is effectively done using + // floats, not doubles. 
+ (float) result.getMinX(), + (float) result.getMaxX(), + (float) result.getMaxY(), + (float) result.getMinY() + ), + 1e-3, + pointType + ) + ) + ); + }); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/grouping/BucketTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/grouping/BucketTests.java index 7e7d91cdf76f4..f01b06c23e8a8 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/grouping/BucketTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/grouping/BucketTests.java @@ -12,15 +12,19 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Rounding; +import org.elasticsearch.common.time.DateUtils; import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.logging.LogManager; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.hamcrest.Matcher; +import org.hamcrest.Matchers; import java.time.Duration; +import java.time.Instant; import java.time.Period; import java.util.ArrayList; import java.util.List; @@ -38,6 +42,7 @@ public BucketTests(@Name("TestCase") Supplier testCas public static Iterable parameters() { List suppliers = new ArrayList<>(); dateCases(suppliers, "fixed date", () -> DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2023-02-17T09:00:00.00Z")); + dateNanosCases(suppliers, "fixed date nanos", () -> DateUtils.toLong(Instant.parse("2023-02-17T09:00:00.00Z"))); dateCasesWithSpan( suppliers, "fixed date with period", @@ -54,6 +59,22 @@ public static Iterable parameters() { Duration.ofDays(1L), "[86400000 in Z][fixed]" ); + dateNanosCasesWithSpan( + suppliers, + "fixed date nanos with period", + () -> DateUtils.toLong(Instant.parse("2023-01-01T00:00:00.00Z")), + DataType.DATE_PERIOD, + Period.ofYears(1), + "[YEAR_OF_CENTURY in Z][fixed to midnight]" + ); + dateNanosCasesWithSpan( + suppliers, + "fixed date nanos with duration", + () -> DateUtils.toLong(Instant.parse("2023-02-17T09:00:00.00Z")), + DataType.TIME_DURATION, + Duration.ofDays(1L), + "[86400000 in Z][fixed]" + ); numberCases(suppliers, "fixed long", DataType.LONG, () -> 100L); numberCasesWithSpan(suppliers, "fixed long with span", DataType.LONG, () -> 100L); numberCases(suppliers, "fixed int", DataType.INTEGER, () -> 100); @@ -142,6 +163,62 @@ private static void dateCasesWithSpan( })); } + private static void dateNanosCasesWithSpan( + List suppliers, + String name, + LongSupplier date, + DataType spanType, + Object span, + String spanStr + ) { + suppliers.add(new TestCaseSupplier(name, List.of(DataType.DATE_NANOS, spanType), () -> { + List args = new ArrayList<>(); + args.add(new TestCaseSupplier.TypedData(date.getAsLong(), DataType.DATE_NANOS, "field")); + args.add(new TestCaseSupplier.TypedData(span, spanType, "buckets").forceLiteral()); + return new TestCaseSupplier.TestCase( + args, + Matchers.startsWith("DateTruncDateNanosEvaluator[fieldVal=Attribute[channel=0], rounding=Rounding["), + DataType.DATE_NANOS, + resultsMatcher(args) + ); + })); + } + + private static void dateNanosCases(List suppliers, String name, LongSupplier date) { + for (DataType fromType : DATE_BOUNDS_TYPE) { + for 
(DataType toType : DATE_BOUNDS_TYPE) { + suppliers.add(new TestCaseSupplier(name, List.of(DataType.DATE_NANOS, DataType.INTEGER, fromType, toType), () -> { + List args = new ArrayList<>(); + args.add(new TestCaseSupplier.TypedData(date.getAsLong(), DataType.DATE_NANOS, "field")); + // TODO more "from" and "to" and "buckets" + args.add(new TestCaseSupplier.TypedData(50, DataType.INTEGER, "buckets").forceLiteral()); + args.add(dateBound("from", fromType, "2023-02-01T00:00:00.00Z")); + args.add(dateBound("to", toType, "2023-03-01T09:00:00.00Z")); + return new TestCaseSupplier.TestCase( + args, + Matchers.startsWith("DateTruncDateNanosEvaluator[fieldVal=Attribute[channel=0], rounding=Rounding["), + DataType.DATE_NANOS, + resultsMatcher(args) + ); + })); + // same as above, but a low bucket count and datetime bounds that match it (at hour span) + suppliers.add(new TestCaseSupplier(name, List.of(DataType.DATE_NANOS, DataType.INTEGER, fromType, toType), () -> { + List args = new ArrayList<>(); + args.add(new TestCaseSupplier.TypedData(date.getAsLong(), DataType.DATE_NANOS, "field")); + args.add(new TestCaseSupplier.TypedData(4, DataType.INTEGER, "buckets").forceLiteral()); + args.add(dateBound("from", fromType, "2023-02-17T09:00:00Z")); + args.add(dateBound("to", toType, "2023-02-17T12:00:00Z")); + return new TestCaseSupplier.TestCase( + args, + Matchers.startsWith("DateTruncDateNanosEvaluator[fieldVal=Attribute[channel=0], rounding=Rounding["), + DataType.DATE_NANOS, + equalTo(Rounding.builder(Rounding.DateTimeUnit.HOUR_OF_DAY).build().prepareForUnknown().round(date.getAsLong())) + ); + })); + } + } + } + private static final DataType[] NUMBER_BOUNDS_TYPES = new DataType[] { DataType.INTEGER, DataType.LONG, DataType.DOUBLE }; private static void numberCases(List suppliers, String name, DataType numberType, Supplier number) { @@ -221,7 +298,19 @@ private static TestCaseSupplier.TypedData keywordDateLiteral(String name, DataTy private static Matcher resultsMatcher(List typedData) { if (typedData.get(0).type() == DataType.DATETIME) { long millis = ((Number) typedData.get(0).data()).longValue(); - return equalTo(Rounding.builder(Rounding.DateTimeUnit.DAY_OF_MONTH).build().prepareForUnknown().round(millis)); + long expected = Rounding.builder(Rounding.DateTimeUnit.DAY_OF_MONTH).build().prepareForUnknown().round(millis); + LogManager.getLogger(getTestClass()).info("Expected: " + Instant.ofEpochMilli(expected)); + LogManager.getLogger(getTestClass()).info("Input: " + Instant.ofEpochMilli(millis)); + return equalTo(expected); + } + if (typedData.get(0).type() == DataType.DATE_NANOS) { + long nanos = ((Number) typedData.get(0).data()).longValue(); + long expected = DateUtils.toNanoSeconds( + Rounding.builder(Rounding.DateTimeUnit.DAY_OF_MONTH).build().prepareForUnknown().round(DateUtils.toMilliSeconds(nanos)) + ); + LogManager.getLogger(getTestClass()).info("Expected: " + DateUtils.toInstant(expected)); + LogManager.getLogger(getTestClass()).info("Input: " + DateUtils.toInstant(nanos)); + return equalTo(expected); } return equalTo(((Number) typedData.get(0).data()).doubleValue()); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeTests.java index ac87d45491447..9f629d9127673 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeTests.java @@ -13,6 +13,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.geometry.Point; import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor; +import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor.WrapLongitude; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; @@ -74,7 +75,9 @@ private static BytesRef valueOf(BytesRef wkb, boolean geo) { if (geometry instanceof Point) { return wkb; } - var envelope = geo ? SpatialEnvelopeVisitor.visitGeo(geometry, true) : SpatialEnvelopeVisitor.visitCartesian(geometry); + var envelope = geo + ? SpatialEnvelopeVisitor.visitGeo(geometry, WrapLongitude.WRAP) + : SpatialEnvelopeVisitor.visitCartesian(geometry); if (envelope.isPresent()) { return UNSPECIFIED.asWkb(envelope.get()); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxTests.java index dc6e61e44f599..9205879fa1cb9 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxTests.java @@ -13,6 +13,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.geometry.Point; import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor; +import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor.WrapLongitude; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; @@ -61,7 +62,9 @@ private static double valueOf(BytesRef wkb, boolean geo) { if (geometry instanceof Point point) { return point.getX(); } - var envelope = geo ? SpatialEnvelopeVisitor.visitGeo(geometry, true) : SpatialEnvelopeVisitor.visitCartesian(geometry); + var envelope = geo + ? 
SpatialEnvelopeVisitor.visitGeo(geometry, WrapLongitude.WRAP) + : SpatialEnvelopeVisitor.visitCartesian(geometry); if (envelope.isPresent()) { return envelope.get().getMaxX(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinTests.java index 8c06d18b1e281..3603bff9656fe 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinTests.java @@ -13,6 +13,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.geometry.Point; import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor; +import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor.WrapLongitude; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; @@ -61,7 +62,9 @@ private static double valueOf(BytesRef wkb, boolean geo) { if (geometry instanceof Point point) { return point.getX(); } - var envelope = geo ? SpatialEnvelopeVisitor.visitGeo(geometry, true) : SpatialEnvelopeVisitor.visitCartesian(geometry); + var envelope = geo + ? SpatialEnvelopeVisitor.visitGeo(geometry, WrapLongitude.WRAP) + : SpatialEnvelopeVisitor.visitCartesian(geometry); if (envelope.isPresent()) { return envelope.get().getMinX(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxTests.java index 7222d7517f7ff..cb2a03c3a9473 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxTests.java @@ -13,6 +13,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.geometry.Point; import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor; +import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor.WrapLongitude; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; @@ -61,7 +62,9 @@ private static double valueOf(BytesRef wkb, boolean geo) { if (geometry instanceof Point point) { return point.getY(); } - var envelope = geo ? SpatialEnvelopeVisitor.visitGeo(geometry, true) : SpatialEnvelopeVisitor.visitCartesian(geometry); + var envelope = geo + ? 
SpatialEnvelopeVisitor.visitGeo(geometry, WrapLongitude.WRAP) + : SpatialEnvelopeVisitor.visitCartesian(geometry); if (envelope.isPresent()) { return envelope.get().getMaxY(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinTests.java index 843c7bb649114..0c191f6dc4c5b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinTests.java @@ -13,6 +13,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.geometry.Point; import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor; +import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor.WrapLongitude; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; @@ -61,7 +62,9 @@ private static double valueOf(BytesRef wkb, boolean geo) { if (geometry instanceof Point point) { return point.getY(); } - var envelope = geo ? SpatialEnvelopeVisitor.visitGeo(geometry, true) : SpatialEnvelopeVisitor.visitCartesian(geometry); + var envelope = geo + ? SpatialEnvelopeVisitor.visitGeo(geometry, WrapLongitude.WRAP) + : SpatialEnvelopeVisitor.visitCartesian(geometry); if (envelope.isPresent()) { return envelope.get().getMinY(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index c2a26845d4e88..7e498eb6654b9 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -20,7 +20,6 @@ import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.VerificationException; -import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.elasticsearch.xpack.esql.analysis.Analyzer; import org.elasticsearch.xpack.esql.analysis.AnalyzerContext; import org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils; @@ -41,6 +40,7 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison; +import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.core.util.Holder; @@ -113,7 +113,9 @@ import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.plan.logical.join.InlineJoin; import org.elasticsearch.xpack.esql.plan.logical.join.Join; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinConfig; import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes; +import org.elasticsearch.xpack.esql.plan.logical.join.LookupJoin; import org.elasticsearch.xpack.esql.plan.logical.local.EsqlProject; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; 
import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; @@ -139,6 +141,7 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.TWO; import static org.elasticsearch.xpack.esql.EsqlTestUtils.as; import static org.elasticsearch.xpack.esql.EsqlTestUtils.emptySource; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.fieldAttribute; import static org.elasticsearch.xpack.esql.EsqlTestUtils.getFieldAttribute; import static org.elasticsearch.xpack.esql.EsqlTestUtils.loadMapping; import static org.elasticsearch.xpack.esql.EsqlTestUtils.localSource; @@ -146,6 +149,7 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.withDefaultLimitWarning; import static org.elasticsearch.xpack.esql.analysis.Analyzer.NO_FIELDS; import static org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils.analyze; +import static org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils.defaultLookupResolution; import static org.elasticsearch.xpack.esql.core.expression.Literal.NULL; import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; @@ -218,7 +222,13 @@ public static void init() { EsIndex test = new EsIndex("test", mapping, Map.of("test", IndexMode.STANDARD)); IndexResolution getIndexResult = IndexResolution.valid(test); analyzer = new Analyzer( - new AnalyzerContext(EsqlTestUtils.TEST_CFG, new EsqlFunctionRegistry(), getIndexResult, enrichResolution), + new AnalyzerContext( + EsqlTestUtils.TEST_CFG, + new EsqlFunctionRegistry(), + getIndexResult, + defaultLookupResolution(), + enrichResolution + ), TEST_VERIFIER ); @@ -1212,8 +1222,6 @@ public void testCombineProjectionWithAggregationFirstAndAliasedGroupingUsedInAgg * \_EsRelation[test][_meta_field{f}#23, emp_no{f}#17, first_name{f}#18, ..] */ public void testCombineProjectionWithCategorizeGrouping() { - assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V5.isEnabled()); - var plan = plan(""" from test | eval k = first_name, k1 = k @@ -1294,6 +1302,26 @@ public void testCombineLimits() { ); } + public void testPushdownLimitsPastLeftJoin() { + var leftChild = emptySource(); + var rightChild = new LocalRelation(Source.EMPTY, List.of(fieldAttribute()), LocalSupplier.EMPTY); + assertNotEquals(leftChild, rightChild); + + var joinConfig = new JoinConfig(JoinTypes.LEFT, List.of(), List.of(), List.of()); + var join = switch (randomIntBetween(0, 2)) { + case 0 -> new Join(EMPTY, leftChild, rightChild, joinConfig); + case 1 -> new LookupJoin(EMPTY, leftChild, rightChild, joinConfig); + case 2 -> new InlineJoin(EMPTY, leftChild, rightChild, joinConfig); + default -> throw new IllegalArgumentException(); + }; + + var limit = new Limit(EMPTY, L(10), join); + + var optimizedPlan = new PushDownAndCombineLimits().rule(limit); + + assertEquals(join.replaceChildren(limit.replaceChild(join.left()), join.right()), optimizedPlan); + } + public void testMultipleCombineLimits() { var numberOfLimits = randomIntBetween(3, 10); var minimum = randomIntBetween(10, 99); @@ -3949,8 +3977,6 @@ public void testNestedExpressionsInGroups() { * \_EsRelation[test][_meta_field{f}#14, emp_no{f}#8, first_name{f}#9, ge..] 
*/ public void testNestedExpressionsInGroupsWithCategorize() { - assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V5.isEnabled()); - var plan = optimizedPlan(""" from test | stats c = count(salary) by CATEGORIZE(CONCAT(first_name, "abc")) @@ -4877,6 +4903,26 @@ public void testPlanSanityCheck() throws Exception { assertThat(e.getMessage(), containsString(" optimized incorrectly due to missing references [salary")); } + public void testPlanSanityCheckWithBinaryPlans() throws Exception { + var plan = optimizedPlan(""" + FROM test + | RENAME languages AS language_code + | LOOKUP JOIN languages_lookup ON language_code + """); + + var project = as(plan, Project.class); + var limit = as(project.child(), Limit.class); + var join = as(limit.child(), Join.class); + + var joinWithInvalidLeftPlan = join.replaceChildren(join.right(), join.right()); + IllegalStateException e = expectThrows(IllegalStateException.class, () -> logicalOptimizer.optimize(joinWithInvalidLeftPlan)); + assertThat(e.getMessage(), containsString(" optimized incorrectly due to missing references from left hand side [language_code")); + + var joinWithInvalidRightPlan = join.replaceChildren(join.left(), join.left()); + e = expectThrows(IllegalStateException.class, () -> logicalOptimizer.optimize(joinWithInvalidRightPlan)); + assertThat(e.getMessage(), containsString(" optimized incorrectly due to missing references from right hand side [language_code")); + } + // https://github.com/elastic/elasticsearch/issues/104995 public void testNoWrongIsNotNullPruning() { var plan = optimizedPlan(""" diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index 9682bb1c8b076..dc3ae0a3388cb 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.geometry.Polygon; import org.elasticsearch.geometry.ShapeType; import org.elasticsearch.index.IndexMode; +import org.elasticsearch.index.mapper.MappedFieldType.FieldExtractPreference; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.ExistsQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; @@ -64,6 +65,7 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialAggregateFunction; import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialCentroid; +import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialExtent; import org.elasticsearch.xpack.esql.expression.function.aggregate.Sum; import org.elasticsearch.xpack.esql.expression.function.fulltext.Match; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Round; @@ -115,6 +117,7 @@ import org.elasticsearch.xpack.esql.plan.physical.HashJoinExec; import org.elasticsearch.xpack.esql.plan.physical.LimitExec; import org.elasticsearch.xpack.esql.plan.physical.LocalSourceExec; +import org.elasticsearch.xpack.esql.plan.physical.LookupJoinExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.plan.physical.ProjectExec; import org.elasticsearch.xpack.esql.plan.physical.TopNExec; @@ -155,6 +158,7 @@ import static 
org.elasticsearch.xpack.esql.EsqlTestUtils.withDefaultLimitWarning; import static org.elasticsearch.xpack.esql.SerializationTestUtils.assertSerialization; import static org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils.analyze; +import static org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils.defaultLookupResolution; import static org.elasticsearch.xpack.esql.core.expression.Expressions.name; import static org.elasticsearch.xpack.esql.core.expression.Expressions.names; import static org.elasticsearch.xpack.esql.core.expression.function.scalar.FunctionTestUtils.l; @@ -251,7 +255,7 @@ public void init() { "mapping-airports_no_doc_values.json", functionRegistry, enrichResolution, - new TestConfigurableSearchStats().exclude(Config.DOC_VALUES, "location") + new TestConfigurableSearchStats().exclude(Config.DOC_VALUES, "location").exclude(Config.DOC_VALUES, "city_location") ); this.airportsNotIndexed = makeTestDataSource( "airports-not-indexed", @@ -281,16 +285,30 @@ TestDataSource makeTestDataSource( String indexName, String mappingFileName, EsqlFunctionRegistry functionRegistry, + IndexResolution lookupResolution, EnrichResolution enrichResolution, SearchStats stats ) { Map mapping = loadMapping(mappingFileName); EsIndex index = new EsIndex(indexName, mapping, Map.of("test", IndexMode.STANDARD)); IndexResolution getIndexResult = IndexResolution.valid(index); - Analyzer analyzer = new Analyzer(new AnalyzerContext(config, functionRegistry, getIndexResult, enrichResolution), TEST_VERIFIER); + Analyzer analyzer = new Analyzer( + new AnalyzerContext(config, functionRegistry, getIndexResult, lookupResolution, enrichResolution), + TEST_VERIFIER + ); return new TestDataSource(mapping, index, analyzer, stats); } + TestDataSource makeTestDataSource( + String indexName, + String mappingFileName, + EsqlFunctionRegistry functionRegistry, + EnrichResolution enrichResolution, + SearchStats stats + ) { + return makeTestDataSource(indexName, mappingFileName, functionRegistry, defaultLookupResolution(), enrichResolution, stats); + } + TestDataSource makeTestDataSource( String indexName, String mappingFileName, @@ -2312,6 +2330,39 @@ public void testVerifierOnMissingReferences() { assertThat(e.getMessage(), containsString(" > 10[INTEGER]]] optimized incorrectly due to missing references [emp_no{f}#")); } + public void testVerifierOnMissingReferencesWithBinaryPlans() throws Exception { + // Do not assert serialization: + // This will have a LookupJoinExec, which is not serializable because it doesn't leave the coordinator. + var plan = physicalPlan(""" + FROM test + | RENAME languages AS language_code + | SORT language_code + | LOOKUP JOIN languages_lookup ON language_code + """, testData, false); + + var planWithInvalidJoinLeftSide = plan.transformUp(LookupJoinExec.class, join -> join.replaceChildren(join.right(), join.right())); + + var e = expectThrows(IllegalStateException.class, () -> physicalPlanOptimizer.verify(planWithInvalidJoinLeftSide)); + assertThat(e.getMessage(), containsString(" optimized incorrectly due to missing references from left hand side [language_code")); + + var planWithInvalidJoinRightSide = plan.transformUp( + LookupJoinExec.class, + // LookupJoinExec.rightReferences() is currently EMPTY (hack); use a HashJoinExec instead. 
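+ // The fabricated join keeps the original output (which includes the lookup fields) but wires the left-hand plan in as the right child, so those fields can no longer be resolved.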
+ join -> new HashJoinExec( + join.source(), + join.left(), + join.left(), + join.leftFields(), + join.leftFields(), + join.rightFields(), + join.output() + ) + ); + + e = expectThrows(IllegalStateException.class, () -> physicalPlanOptimizer.verify(planWithInvalidJoinRightSide)); + assertThat(e.getMessage(), containsString(" optimized incorrectly due to missing references from right hand side [language_code")); + } + public void testVerifierOnDuplicateOutputAttributes() { var plan = physicalPlan(""" from test @@ -2755,7 +2806,7 @@ public void testPartialAggFoldingOutputForSyntheticAgg() { * Also note that the type converting function is removed when it does not actually convert the type, * ensuring that ReferenceAttributes are not created for the same field, and the optimization can still work. */ - public void testSpatialTypesAndStatsUseDocValues() { + public void testSpatialTypesAndStatsCentroidUseDocValues() { for (String query : new String[] { "from airports | stats centroid = st_centroid_agg(location)", "from airports | stats centroid = st_centroid_agg(to_geopoint(location))", @@ -2789,6 +2840,129 @@ public void testSpatialTypesAndStatsUseDocValues() { } } + /** + * Before local optimizations: + * + * LimitExec[1000[INTEGER]] + * \_AggregateExec[[],[SPATIALSTEXTENT(location{f}#48,true[BOOLEAN]) AS extent],FINAL,[minNegX{r}#52, minPosX{r}#53, maxNegX{r}#54, + * maxPosX{r}#55, maxY{r}#56, minY{r}#57],null] + * \_ExchangeExec[[minNegX{r}#52, minPosX{r}#53, maxNegX{r}#54, maxPosX{r}#55, maxY{r}#56, minY{r}#57],true] + * \_FragmentExec[filter=null, estimatedRowSize=0, reducer=[], fragment=[ + * Aggregate[STANDARD,[],[SPATIALSTEXTENT(location{f}#48,true[BOOLEAN]) AS extent]] + * \_EsRelation[airports][abbrev{f}#44, city{f}#50, city_location{f}#51, coun..]]] + * + * After local optimizations: + * + * LimitExec[1000[INTEGER]] + * \_AggregateExec[[],[SPATIALSTEXTENT(location{f}#48,true[BOOLEAN]) AS extent],FINAL,[minNegX{r}#52, minPosX{r}#53, maxNegX{r}#54, + * maxPosX{r}#55, maxY{r}#56, minY{r}#57],21] + * \_ExchangeExec[[minNegX{r}#52, minPosX{r}#53, maxNegX{r}#54, maxPosX{r}#55, maxY{r}#56, minY{r}#57],true] + * \_AggregateExec[[],[SPATIALSTEXTENT(location{f}#48,true[BOOLEAN]) AS extent],INITIAL,[ + * minNegX{r}#73, minPosX{r}#74, maxNegX{rb#75, maxPosX{r}#76, maxY{r}#77, minY{r}#78],21] + * \_FieldExtractExec[location{f}#48][location{f}#48] + * \_EsQueryExec[airports], indexMode[standard], query[{"exists":{"field":"location","boost":1.0}}][ + * _doc{f}#79], limit[], sort[] estimatedRowSize[25] + * + * Note the FieldExtractExec has 'location' set for stats: FieldExtractExec[location{f}#9][location{f}#9] + *
    + * Also note that the type converting function is removed when it does not actually convert the type, + * ensuring that ReferenceAttributes are not created for the same field, and the optimization can still work. + */ + public void testSpatialTypesAndStatsExtentUseDocValues() { + for (String query : new String[] { + "from airports | stats extent = st_extent_agg(location)", + "from airports | stats extent = st_extent_agg(to_geopoint(location))", + "from airports | eval location = to_geopoint(location) | stats extent = st_extent_agg(location)" }) { + for (boolean withDocValues : new boolean[] { false, true }) { + var testData = withDocValues ? airports : airportsNoDocValues; + var plan = physicalPlan(query, testData); + + var limit = as(plan, LimitExec.class); + var agg = as(limit.child(), AggregateExec.class); + // Before optimization the aggregation does not use doc-values + assertAggregation(agg, "extent", SpatialExtent.class, GEO_POINT, false); + + var exchange = as(agg.child(), ExchangeExec.class); + var fragment = as(exchange.child(), FragmentExec.class); + var fAgg = as(fragment.fragment(), Aggregate.class); + as(fAgg.child(), EsRelation.class); + + // Now optimize the plan and assert the aggregation uses doc-values + var optimized = optimizedPlan(plan, testData.stats); + limit = as(optimized, LimitExec.class); + agg = as(limit.child(), AggregateExec.class); + // Above the exchange (in coordinator) the aggregation is not using doc-values + assertAggregation(agg, "extent", SpatialExtent.class, GEO_POINT, false); + exchange = as(agg.child(), ExchangeExec.class); + agg = as(exchange.child(), AggregateExec.class); + // below the exchange (in data node) the aggregation is using doc-values + assertAggregation(agg, "extent", SpatialExtent.class, GEO_POINT, withDocValues); + assertChildIsGeoPointExtract(withDocValues ? agg : as(agg.child(), FilterExec.class), withDocValues); + } + } + } + + /** + * Before local optimizations: + * + * LimitExec[1000[INTEGER]] + * \_AggregateExec[[],[SPATIALSTEXTENT(location{f}#48,true[BOOLEAN]) AS extent],FINAL,[minNegX{r}#52, minPosX{r}#53, maxNegX{r}#54, + * maxPosX{r}#55, maxY{r}#56, minY{r}#57],null] + * \_ExchangeExec[[minNegX{r}#52, minPosX{r}#53, maxNegX{r}#54, maxPosX{r}#55, maxY{r}#56, minY{r}#57],true] + * \_FragmentExec[filter=null, estimatedRowSize=0, reducer=[], fragment=[ + * Aggregate[STANDARD,[],[SPATIALSTEXTENT(location{f}#48,true[BOOLEAN]) AS extent]] + * \_EsRelation[airports][abbrev{f}#44, city{f}#50, city_location{f}#51, coun..]]] + * + * After local optimizations: + * + * LimitExec[1000[INTEGER]] + * \_AggregateExec[[],[SPATIALSTEXTENT(location{f}#48,true[BOOLEAN]) AS extent],FINAL,[minNegX{r}#52, minPosX{r}#53, maxNegX{r}#54, + * maxPosX{r}#55, maxY{r}#56, minY{r}#57],21] + * \_ExchangeExec[[minNegX{r}#52, minPosX{r}#53, maxNegX{r}#54, maxPosX{r}#55, maxY{r}#56, minY{r}#57],true] + * \_AggregateExec[[],[SPATIALSTEXTENT(location{f}#48,true[BOOLEAN]) AS extent],INITIAL,[ + * minNegX{r}#73, minPosX{r}#74, maxNegX{rb#75, maxPosX{r}#76, maxY{r}#77, minY{r}#78],21] + * \_FieldExtractExec[location{f}#48][location{f}#48] + * \_EsQueryExec[airports], indexMode[standard], query[{"exists":{"field":"location","boost":1.0}}][ + * _doc{f}#79], limit[], sort[] estimatedRowSize[25] + * + * Note the FieldExtractExec has 'location' set for stats: FieldExtractExec[location{f}#9][location{f}#9] + *
    + * Also note that the type converting function is removed when it does not actually convert the type, + * ensuring that ReferenceAttributes are not created for the same field, and the optimization can still work. + */ + public void testSpatialTypesAndStatsExtentAndCentroidUseDocValues() { + for (String query : new String[] { + "from airports | stats extent = st_extent_agg(location), centroid = st_centroid_agg(location)", + "from airports | stats extent = st_extent_agg(location), centroid = st_centroid_agg(city_location)", }) { + for (boolean withDocValues : new boolean[] { false, true }) { + var testData = withDocValues ? airports : airportsNoDocValues; + var plan = physicalPlan(query, testData); + + var limit = as(plan, LimitExec.class); + var agg = as(limit.child(), AggregateExec.class); + // Before optimization the aggregation does not use doc-values + assertAggregation(agg, "extent", SpatialExtent.class, GEO_POINT, false); + + var exchange = as(agg.child(), ExchangeExec.class); + var fragment = as(exchange.child(), FragmentExec.class); + var fAgg = as(fragment.fragment(), Aggregate.class); + as(fAgg.child(), EsRelation.class); + + // Now optimize the plan and assert the aggregation uses doc-values + var optimized = optimizedPlan(plan, testData.stats); + limit = as(optimized, LimitExec.class); + agg = as(limit.child(), AggregateExec.class); + // Above the exchange (in coordinator) the aggregation is not using doc-values + assertAggregation(agg, "extent", SpatialExtent.class, GEO_POINT, false); + exchange = as(agg.child(), ExchangeExec.class); + agg = as(exchange.child(), AggregateExec.class); + // below the exchange (in data node) the aggregation is using doc-values + assertAggregation(agg, "extent", SpatialExtent.class, GEO_POINT, withDocValues); + assertChildIsGeoPointExtract(withDocValues ? agg : as(agg.child(), FilterExec.class), withDocValues); + } + } + } + /** * This test does not have real index fields, and therefor asserts that doc-values field extraction does NOT occur. * Before local optimizations: @@ -6756,7 +6930,11 @@ private static void assertAggregation( var aggFunc = assertAggregation(plan, aliasName, aggClass); var aggField = as(aggFunc.field(), Attribute.class); var spatialAgg = as(aggFunc, SpatialAggregateFunction.class); - assertThat("Expected spatial aggregation to use doc-values", spatialAgg.useDocValues(), equalTo(useDocValues)); + assertThat( + "Expected spatial aggregation to use doc-values", + spatialAgg.fieldExtractPreference(), + equalTo(useDocValues ? 
FieldExtractPreference.DOC_VALUES : FieldExtractPreference.NONE) + ); assertThat("", aggField.dataType(), equalTo(fieldType)); } @@ -6863,11 +7041,17 @@ private PhysicalPlan physicalPlan(String query) { } private PhysicalPlan physicalPlan(String query, TestDataSource dataSource) { + return physicalPlan(query, dataSource, true); + } + + private PhysicalPlan physicalPlan(String query, TestDataSource dataSource, boolean assertSerialization) { var logical = logicalOptimizer.optimize(dataSource.analyzer.analyze(parser.createStatement(query))); // System.out.println("Logical\n" + logical); var physical = mapper.map(logical); // System.out.println(physical); - assertSerialization(physical); + if (assertSerialization) { + assertSerialization(physical); + } return physical; } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlSessionCCSUtilsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlSessionCCSUtilsTests.java index 60b632c443f8e..1000c05282fdb 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlSessionCCSUtilsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlSessionCCSUtilsTests.java @@ -8,10 +8,18 @@ package org.elasticsearch.xpack.esql.session; import org.apache.lucene.index.CorruptIndexException; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesFailure; import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.common.Strings; import org.elasticsearch.index.IndexMode; +import org.elasticsearch.indices.IndicesExpressionGrouper; +import org.elasticsearch.license.License; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.license.internal.XPackLicenseStatus; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.NoSeedNodeLeftException; @@ -20,9 +28,11 @@ import org.elasticsearch.transport.RemoteTransportException; import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.action.EsqlExecutionInfo; +import org.elasticsearch.xpack.esql.analysis.TableInfo; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.index.EsIndex; import org.elasticsearch.xpack.esql.index.IndexResolution; +import org.elasticsearch.xpack.esql.plan.TableIdentifier; import org.elasticsearch.xpack.esql.type.EsFieldTests; import java.util.ArrayList; @@ -32,8 +42,12 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.LongSupplier; import java.util.function.Predicate; +import java.util.stream.Collectors; +import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; +import static org.elasticsearch.xpack.esql.session.EsqlSessionCCSUtils.checkForCcsLicense; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -627,4 +641,148 @@ public void testMissingIndicesIsFatal() { } } + + public void testCheckForCcsLicense() { + final TestIndicesExpressionGrouper indicesGrouper = new TestIndicesExpressionGrouper(); + + // this seems to be used only for tracking usage of features, not for checking if a license is expired + 
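+ // The active/expired distinction comes from the XPackLicenseStatus instances constructed below.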
final LongSupplier currTime = () -> System.currentTimeMillis(); + + XPackLicenseState enterpriseLicenseValid = new XPackLicenseState(currTime, activeLicenseStatus(License.OperationMode.ENTERPRISE)); + XPackLicenseState trialLicenseValid = new XPackLicenseState(currTime, activeLicenseStatus(License.OperationMode.TRIAL)); + XPackLicenseState platinumLicenseValid = new XPackLicenseState(currTime, activeLicenseStatus(License.OperationMode.PLATINUM)); + XPackLicenseState goldLicenseValid = new XPackLicenseState(currTime, activeLicenseStatus(License.OperationMode.GOLD)); + XPackLicenseState basicLicenseValid = new XPackLicenseState(currTime, activeLicenseStatus(License.OperationMode.BASIC)); + XPackLicenseState standardLicenseValid = new XPackLicenseState(currTime, activeLicenseStatus(License.OperationMode.STANDARD)); + XPackLicenseState missingLicense = new XPackLicenseState(currTime, activeLicenseStatus(License.OperationMode.MISSING)); + XPackLicenseState nullLicense = null; + + final XPackLicenseStatus enterpriseStatus = inactiveLicenseStatus(License.OperationMode.ENTERPRISE); + XPackLicenseState enterpriseLicenseInactive = new XPackLicenseState(currTime, enterpriseStatus); + XPackLicenseState trialLicenseInactive = new XPackLicenseState(currTime, inactiveLicenseStatus(License.OperationMode.TRIAL)); + XPackLicenseState platinumLicenseInactive = new XPackLicenseState(currTime, inactiveLicenseStatus(License.OperationMode.PLATINUM)); + XPackLicenseState goldLicenseInactive = new XPackLicenseState(currTime, inactiveLicenseStatus(License.OperationMode.GOLD)); + XPackLicenseState basicLicenseInactive = new XPackLicenseState(currTime, inactiveLicenseStatus(License.OperationMode.BASIC)); + XPackLicenseState standardLicenseInactive = new XPackLicenseState(currTime, inactiveLicenseStatus(License.OperationMode.STANDARD)); + XPackLicenseState missingLicenseInactive = new XPackLicenseState(currTime, inactiveLicenseStatus(License.OperationMode.MISSING)); + + // local only search does not require an enterprise license + { + List indices = new ArrayList<>(); + indices.add(new TableInfo(new TableIdentifier(EMPTY, null, randomFrom("idx", "idx1,idx2*")))); + + checkForCcsLicense(indices, indicesGrouper, enterpriseLicenseValid); + checkForCcsLicense(indices, indicesGrouper, platinumLicenseValid); + checkForCcsLicense(indices, indicesGrouper, goldLicenseValid); + checkForCcsLicense(indices, indicesGrouper, trialLicenseValid); + checkForCcsLicense(indices, indicesGrouper, basicLicenseValid); + checkForCcsLicense(indices, indicesGrouper, standardLicenseValid); + checkForCcsLicense(indices, indicesGrouper, missingLicense); + checkForCcsLicense(indices, indicesGrouper, nullLicense); + + checkForCcsLicense(indices, indicesGrouper, enterpriseLicenseInactive); + checkForCcsLicense(indices, indicesGrouper, platinumLicenseInactive); + checkForCcsLicense(indices, indicesGrouper, goldLicenseInactive); + checkForCcsLicense(indices, indicesGrouper, trialLicenseInactive); + checkForCcsLicense(indices, indicesGrouper, basicLicenseInactive); + checkForCcsLicense(indices, indicesGrouper, standardLicenseInactive); + checkForCcsLicense(indices, indicesGrouper, missingLicenseInactive); + } + + // cross-cluster search requires a valid (active, non-expired) enterprise license OR a valid trial license + { + List indices = new ArrayList<>(); + final String indexExprWithRemotes = randomFrom("remote:idx", "idx1,remote:idx2*,remote:logs,c*:idx4"); + if (randomBoolean()) { + indices.add(new TableInfo(new TableIdentifier(EMPTY, null, 
indexExprWithRemotes))); + } else { + indices.add(new TableInfo(new TableIdentifier(EMPTY, null, randomFrom("idx", "idx1,idx2*")))); + indices.add(new TableInfo(new TableIdentifier(EMPTY, null, indexExprWithRemotes))); + } + + // licenses that work + checkForCcsLicense(indices, indicesGrouper, enterpriseLicenseValid); + checkForCcsLicense(indices, indicesGrouper, trialLicenseValid); + + // all others fail --- + + // active non-expired non-Enterprise non-Trial licenses + assertLicenseCheckFails(indices, indicesGrouper, platinumLicenseValid, "active platinum license"); + assertLicenseCheckFails(indices, indicesGrouper, goldLicenseValid, "active gold license"); + assertLicenseCheckFails(indices, indicesGrouper, basicLicenseValid, "active basic license"); + assertLicenseCheckFails(indices, indicesGrouper, standardLicenseValid, "active standard license"); + assertLicenseCheckFails(indices, indicesGrouper, missingLicense, "active missing license"); + assertLicenseCheckFails(indices, indicesGrouper, nullLicense, "none"); + + // inactive/expired licenses + assertLicenseCheckFails(indices, indicesGrouper, enterpriseLicenseInactive, "expired enterprise license"); + assertLicenseCheckFails(indices, indicesGrouper, trialLicenseInactive, "expired trial license"); + assertLicenseCheckFails(indices, indicesGrouper, platinumLicenseInactive, "expired platinum license"); + assertLicenseCheckFails(indices, indicesGrouper, goldLicenseInactive, "expired gold license"); + assertLicenseCheckFails(indices, indicesGrouper, basicLicenseInactive, "expired basic license"); + assertLicenseCheckFails(indices, indicesGrouper, standardLicenseInactive, "expired standard license"); + assertLicenseCheckFails(indices, indicesGrouper, missingLicenseInactive, "expired missing license"); + } + } + + private XPackLicenseStatus activeLicenseStatus(License.OperationMode operationMode) { + return new XPackLicenseStatus(operationMode, true, null); + } + + private XPackLicenseStatus inactiveLicenseStatus(License.OperationMode operationMode) { + return new XPackLicenseStatus(operationMode, false, "License Expired 123"); + } + + private void assertLicenseCheckFails( + List indices, + TestIndicesExpressionGrouper indicesGrouper, + XPackLicenseState licenseState, + String expectedErrorMessageSuffix + ) { + ElasticsearchStatusException e = expectThrows( + ElasticsearchStatusException.class, + () -> checkForCcsLicense(indices, indicesGrouper, licenseState) + ); + assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST)); + assertThat( + e.getMessage(), + equalTo( + "A valid Enterprise license is required to run ES|QL cross-cluster searches. 
License found: " + expectedErrorMessageSuffix + ) + ); + } + + static class TestIndicesExpressionGrouper implements IndicesExpressionGrouper { + @Override + public Map groupIndices(IndicesOptions indicesOptions, String[] indexExpressions) { + final Map originalIndicesMap = new HashMap<>(); + final String localKey = RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY; + + for (String expr : indexExpressions) { + assertFalse(Strings.isNullOrBlank(expr)); + String[] split = expr.split(":", 2); + assertTrue("Bad index expression: " + expr, split.length < 3); + String clusterAlias; + String indexExpr; + if (split.length == 1) { + clusterAlias = localKey; + indexExpr = expr; + } else { + clusterAlias = split[0]; + indexExpr = split[1]; + + } + OriginalIndices currIndices = originalIndicesMap.get(clusterAlias); + if (currIndices == null) { + originalIndicesMap.put(clusterAlias, new OriginalIndices(new String[] { indexExpr }, indicesOptions)); + } else { + List indicesList = Arrays.stream(currIndices.indices()).collect(Collectors.toList()); + indicesList.add(indexExpr); + originalIndicesMap.put(clusterAlias, new OriginalIndices(indicesList.toArray(new String[0]), indicesOptions)); + } + } + return originalIndicesMap; + } + } + } diff --git a/x-pack/plugin/ilm/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ilm/CCRIndexLifecycleIT.java b/x-pack/plugin/ilm/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ilm/CCRIndexLifecycleIT.java index 5168cd11eb172..a5d966873dda1 100644 --- a/x-pack/plugin/ilm/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ilm/CCRIndexLifecycleIT.java +++ b/x-pack/plugin/ilm/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ilm/CCRIndexLifecycleIT.java @@ -38,7 +38,6 @@ import java.util.Optional; import java.util.concurrent.TimeUnit; -import static java.util.Collections.singletonMap; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.core.ilm.ShrinkIndexNameSupplier.SHRUNKEN_INDEX_PREFIX; import static org.hamcrest.Matchers.equalTo; @@ -762,8 +761,8 @@ private void assertDocumentExists(RestClient client, String index, String id) th } private void createNewSingletonPolicy(String policyName, String phaseName, LifecycleAction action, TimeValue after) throws IOException { - Phase phase = new Phase(phaseName, after, singletonMap(action.getWriteableName(), action)); - LifecyclePolicy lifecyclePolicy = new LifecyclePolicy(policyName, singletonMap(phase.getName(), phase)); + Phase phase = new Phase(phaseName, after, Map.of(action.getWriteableName(), action)); + LifecyclePolicy lifecyclePolicy = new LifecyclePolicy(policyName, Map.of(phase.getName(), phase)); XContentBuilder builder = jsonBuilder(); lifecyclePolicy.toXContent(builder, null); final StringEntity entity = new StringEntity("{ \"policy\":" + Strings.toString(builder) + "}", ContentType.APPLICATION_JSON); diff --git a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/MigrateToDataTiersIT.java b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/MigrateToDataTiersIT.java index 60e71b095039e..811d07a436677 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/MigrateToDataTiersIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/MigrateToDataTiersIT.java @@ -46,7 +46,6 @@ import java.util.Map; import java.util.concurrent.TimeUnit; -import static java.util.Collections.singletonMap; import static 
org.elasticsearch.xpack.TimeSeriesRestDriver.createIndexWithSettings; import static org.elasticsearch.xpack.TimeSeriesRestDriver.createNewSingletonPolicy; import static org.elasticsearch.xpack.TimeSeriesRestDriver.createPolicy; @@ -101,11 +100,11 @@ public void testMigrateToDataTiersAction() throws Exception { Map warmActions = new HashMap<>(); warmActions.put(SetPriorityAction.NAME, new SetPriorityAction(50)); warmActions.put(ForceMergeAction.NAME, new ForceMergeAction(1, null)); - warmActions.put(AllocateAction.NAME, new AllocateAction(null, null, singletonMap("data", "warm"), null, null)); + warmActions.put(AllocateAction.NAME, new AllocateAction(null, null, Map.of("data", "warm"), null, null)); warmActions.put(ShrinkAction.NAME, new ShrinkAction(1, null, false)); Map coldActions = new HashMap<>(); coldActions.put(SetPriorityAction.NAME, new SetPriorityAction(0)); - coldActions.put(AllocateAction.NAME, new AllocateAction(0, null, null, null, singletonMap("data", "cold"))); + coldActions.put(AllocateAction.NAME, new AllocateAction(0, null, null, null, Map.of("data", "cold"))); createPolicy( client(), @@ -114,7 +113,7 @@ public void testMigrateToDataTiersAction() throws Exception { new Phase("warm", TimeValue.ZERO, warmActions), new Phase("cold", TimeValue.timeValueDays(100), coldActions), null, - new Phase("delete", TimeValue.ZERO, singletonMap(DeleteAction.NAME, DeleteAction.WITH_SNAPSHOT_DELETE)) + new Phase("delete", TimeValue.ZERO, Map.of(DeleteAction.NAME, DeleteAction.WITH_SNAPSHOT_DELETE)) ); createIndexWithSettings( @@ -377,11 +376,11 @@ public void testMigrationDryRun() throws Exception { Map warmActions = new HashMap<>(); warmActions.put(SetPriorityAction.NAME, new SetPriorityAction(50)); warmActions.put(ForceMergeAction.NAME, new ForceMergeAction(1, null)); - warmActions.put(AllocateAction.NAME, new AllocateAction(null, null, singletonMap("data", "warm"), null, null)); + warmActions.put(AllocateAction.NAME, new AllocateAction(null, null, Map.of("data", "warm"), null, null)); warmActions.put(ShrinkAction.NAME, new ShrinkAction(1, null, false)); Map coldActions = new HashMap<>(); coldActions.put(SetPriorityAction.NAME, new SetPriorityAction(0)); - coldActions.put(AllocateAction.NAME, new AllocateAction(0, null, null, null, singletonMap("data", "cold"))); + coldActions.put(AllocateAction.NAME, new AllocateAction(0, null, null, null, Map.of("data", "cold"))); createPolicy( client(), @@ -390,7 +389,7 @@ public void testMigrationDryRun() throws Exception { new Phase("warm", TimeValue.ZERO, warmActions), new Phase("cold", TimeValue.timeValueDays(100), coldActions), null, - new Phase("delete", TimeValue.ZERO, singletonMap(DeleteAction.NAME, DeleteAction.WITH_SNAPSHOT_DELETE)) + new Phase("delete", TimeValue.ZERO, Map.of(DeleteAction.NAME, DeleteAction.WITH_SNAPSHOT_DELETE)) ); createIndexWithSettings( diff --git a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/TimeSeriesRestDriver.java b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/TimeSeriesRestDriver.java index 3949139db033b..a1c7ebc2d8b2c 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/TimeSeriesRestDriver.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/TimeSeriesRestDriver.java @@ -41,7 +41,6 @@ import java.io.IOException; import java.io.InputStream; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Locale; @@ -50,7 +49,6 @@ import 
java.util.concurrent.TimeUnit; import java.util.stream.Collectors; -import static java.util.Collections.singletonMap; import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.test.ESTestCase.randomAlphaOfLengthBetween; import static org.elasticsearch.test.ESTestCase.randomBoolean; @@ -154,8 +152,8 @@ public static void createNewSingletonPolicy( LifecycleAction action, TimeValue after ) throws IOException { - Phase phase = new Phase(phaseName, after, singletonMap(action.getWriteableName(), action)); - LifecyclePolicy lifecyclePolicy = new LifecyclePolicy(policyName, singletonMap(phase.getName(), phase)); + Phase phase = new Phase(phaseName, after, Map.of(action.getWriteableName(), action)); + LifecyclePolicy lifecyclePolicy = new LifecyclePolicy(policyName, Map.of(phase.getName(), phase)); XContentBuilder builder = jsonBuilder(); lifecyclePolicy.toXContent(builder, null); final StringEntity entity = new StringEntity("{ \"policy\":" + Strings.toString(builder) + "}", ContentType.APPLICATION_JSON); @@ -202,7 +200,7 @@ public static void createFullPolicy(RestClient client, String policyName, TimeVa new AllocateAction( 1, null, - singletonMap("_name", "javaRestTest-0,javaRestTest-1," + "javaRestTest-2," + "javaRestTest-3"), + Map.of("_name", "javaRestTest-0,javaRestTest-1," + "javaRestTest-2," + "javaRestTest-3"), null, null ) @@ -215,7 +213,7 @@ public static void createFullPolicy(RestClient client, String policyName, TimeVa new AllocateAction( 0, null, - singletonMap("_name", "javaRestTest-0,javaRestTest-1," + "javaRestTest-2," + "javaRestTest-3"), + Map.of("_name", "javaRestTest-0,javaRestTest-1," + "javaRestTest-2," + "javaRestTest-3"), null, null ) @@ -224,7 +222,7 @@ public static void createFullPolicy(RestClient client, String policyName, TimeVa phases.put("hot", new Phase("hot", hotTime, hotActions)); phases.put("warm", new Phase("warm", TimeValue.ZERO, warmActions)); phases.put("cold", new Phase("cold", TimeValue.ZERO, coldActions)); - phases.put("delete", new Phase("delete", TimeValue.ZERO, singletonMap(DeleteAction.NAME, DeleteAction.WITH_SNAPSHOT_DELETE))); + phases.put("delete", new Phase("delete", TimeValue.ZERO, Map.of(DeleteAction.NAME, DeleteAction.WITH_SNAPSHOT_DELETE))); LifecyclePolicy lifecyclePolicy = new LifecyclePolicy(policyName, phases); // PUT policy XContentBuilder builder = jsonBuilder(); @@ -300,7 +298,7 @@ public static Map getOnlyIndexSettings(RestClient client, String Map responseMap = XContentHelper.convertToMap(XContentType.JSON.xContent(), is, true); Map indexSettings = (Map) responseMap.get(index); if (indexSettings == null) { - return Collections.emptyMap(); + return Map.of(); } return (Map) indexSettings.get("settings"); } diff --git a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/ChangePolicyForIndexIT.java b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/ChangePolicyForIndexIT.java index 7f75b010346ad..370e00785e843 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/ChangePolicyForIndexIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/ChangePolicyForIndexIT.java @@ -32,7 +32,6 @@ import java.util.Map; import java.util.concurrent.TimeUnit; -import static java.util.Collections.singletonMap; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static 
org.elasticsearch.xpack.TimeSeriesRestDriver.createIndexWithSettings; import static org.elasticsearch.xpack.TimeSeriesRestDriver.createNewSingletonPolicy; @@ -67,7 +66,7 @@ public void testChangePolicyForIndex() throws Exception { new Phase( "hot", TimeValue.ZERO, - singletonMap(RolloverAction.NAME, new RolloverAction(null, null, null, 1L, null, null, null, null, null, null)) + Map.of(RolloverAction.NAME, new RolloverAction(null, null, null, 1L, null, null, null, null, null, null)) ) ); phases1.put( @@ -75,7 +74,7 @@ public void testChangePolicyForIndex() throws Exception { new Phase( "warm", TimeValue.ZERO, - singletonMap(AllocateAction.NAME, new AllocateAction(1, null, singletonMap("_name", "foobarbaz"), null, null)) + Map.of(AllocateAction.NAME, new AllocateAction(1, null, Map.of("_name", "foobarbaz"), null, null)) ) ); LifecyclePolicy lifecyclePolicy1 = new LifecyclePolicy("policy_1", phases1); @@ -85,7 +84,7 @@ public void testChangePolicyForIndex() throws Exception { new Phase( "hot", TimeValue.ZERO, - singletonMap(RolloverAction.NAME, new RolloverAction(null, null, null, 1000L, null, null, null, null, null, null)) + Map.of(RolloverAction.NAME, new RolloverAction(null, null, null, 1000L, null, null, null, null, null, null)) ) ); phases2.put( @@ -93,15 +92,9 @@ public void testChangePolicyForIndex() throws Exception { new Phase( "warm", TimeValue.ZERO, - singletonMap( + Map.of( AllocateAction.NAME, - new AllocateAction( - 1, - null, - singletonMap("_name", "javaRestTest-0,javaRestTest-1,javaRestTest-2,javaRestTest-3"), - null, - null - ) + new AllocateAction(1, null, Map.of("_name", "javaRestTest-0,javaRestTest-1,javaRestTest-2,javaRestTest-3"), null, null) ) ) ); diff --git a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/TimeSeriesLifecycleActionsIT.java b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/TimeSeriesLifecycleActionsIT.java index 2b722a6555a08..4c53d711ffdef 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/TimeSeriesLifecycleActionsIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/TimeSeriesLifecycleActionsIT.java @@ -58,7 +58,6 @@ import java.util.Map; import java.util.concurrent.TimeUnit; -import static java.util.Collections.singletonMap; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.TimeSeriesRestDriver.createFullPolicy; import static org.elasticsearch.xpack.TimeSeriesRestDriver.createIndexWithSettings; @@ -219,7 +218,7 @@ public void testAllocateOnlyAllocation() throws Exception { Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 2).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) ); String allocateNodeName = "javaRestTest-0,javaRestTest-1,javaRestTest-2,javaRestTest-3"; - AllocateAction allocateAction = new AllocateAction(null, null, singletonMap("_name", allocateNodeName), null, null); + AllocateAction allocateAction = new AllocateAction(null, null, Map.of("_name", allocateNodeName), null, null); String endPhase = randomFrom("warm", "cold"); createNewSingletonPolicy(client(), policy, endPhase, allocateAction); updatePolicy(client(), index, policy); @@ -978,7 +977,7 @@ public void testHaltAtEndOfPhase() throws Exception { hotActions.put(SetPriorityAction.NAME, new SetPriorityAction(100)); Map phases = new HashMap<>(); phases.put("hot", new Phase("hot", TimeValue.ZERO, hotActions)); - phases.put("delete", new Phase("delete", 
TimeValue.ZERO, singletonMap(DeleteAction.NAME, DeleteAction.WITH_SNAPSHOT_DELETE))); + phases.put("delete", new Phase("delete", TimeValue.ZERO, Map.of(DeleteAction.NAME, DeleteAction.WITH_SNAPSHOT_DELETE))); LifecyclePolicy lifecyclePolicy = new LifecyclePolicy(policy, phases); // PUT policy XContentBuilder builder = jsonBuilder(); @@ -1004,7 +1003,7 @@ public void testDeleteActionDoesntDeleteSearchableSnapshot() throws Exception { phases.put("cold", new Phase("cold", TimeValue.ZERO, coldActions)); phases.put( "delete", - new Phase("delete", TimeValue.timeValueMillis(10000), singletonMap(DeleteAction.NAME, DeleteAction.NO_SNAPSHOT_DELETE)) + new Phase("delete", TimeValue.timeValueMillis(10000), Map.of(DeleteAction.NAME, DeleteAction.NO_SNAPSHOT_DELETE)) ); LifecyclePolicy lifecyclePolicy = new LifecyclePolicy(policy, phases); // PUT policy diff --git a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/SearchableSnapshotActionIT.java b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/SearchableSnapshotActionIT.java index f00b5b566c156..15e59a1593337 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/SearchableSnapshotActionIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/SearchableSnapshotActionIT.java @@ -47,7 +47,6 @@ import java.util.Map; import java.util.concurrent.TimeUnit; -import static java.util.Collections.singletonMap; import static org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE_SETTING; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.TimeSeriesRestDriver.createComposableTemplate; @@ -185,7 +184,7 @@ public void testDeleteActionDeletesSearchableSnapshot() throws Exception { Map coldActions = Map.of(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo)); Map phases = new HashMap<>(); phases.put("cold", new Phase("cold", TimeValue.ZERO, coldActions)); - phases.put("delete", new Phase("delete", TimeValue.timeValueMillis(10000), singletonMap(DeleteAction.NAME, WITH_SNAPSHOT_DELETE))); + phases.put("delete", new Phase("delete", TimeValue.timeValueMillis(10000), Map.of(DeleteAction.NAME, WITH_SNAPSHOT_DELETE))); LifecyclePolicy lifecyclePolicy = new LifecyclePolicy(policy, phases); // PUT policy XContentBuilder builder = jsonBuilder(); @@ -455,7 +454,7 @@ public void testIdenticalSearchableSnapshotActionIsNoop() throws Exception { new Phase( "cold", TimeValue.ZERO, - singletonMap(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) + Map.of(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) ), null ); @@ -516,12 +515,12 @@ public void testConvertingSearchableSnapshotFromFullToPartial() throws Exception new Phase( "cold", TimeValue.ZERO, - singletonMap(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) + Map.of(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) ), new Phase( "frozen", TimeValue.ZERO, - singletonMap(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) + Map.of(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) ), null ); @@ -586,7 +585,7 @@ public void testResumingSearchableSnapshotFromFullToPartial() throws 
Exception { new Phase( "cold", TimeValue.ZERO, - singletonMap(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) + Map.of(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) ), null, null @@ -600,12 +599,12 @@ public void testResumingSearchableSnapshotFromFullToPartial() throws Exception { new Phase( "cold", TimeValue.ZERO, - singletonMap(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) + Map.of(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) ), new Phase( "frozen", TimeValue.ZERO, - singletonMap(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) + Map.of(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) ), null ); @@ -664,14 +663,14 @@ public void testResumingSearchableSnapshotFromFullToPartial() throws Exception { new Phase( "cold", TimeValue.ZERO, - singletonMap(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) + Map.of(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) ), new Phase( "frozen", TimeValue.ZERO, - singletonMap(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) + Map.of(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) ), - new Phase("delete", TimeValue.ZERO, singletonMap(DeleteAction.NAME, WITH_SNAPSHOT_DELETE)) + new Phase("delete", TimeValue.ZERO, Map.of(DeleteAction.NAME, WITH_SNAPSHOT_DELETE)) ); assertBusy(() -> { logger.info("--> waiting for [{}] to be deleted...", partiallyMountedIndexName); @@ -695,7 +694,7 @@ public void testResumingSearchableSnapshotFromPartialToFull() throws Exception { new Phase( "cold", TimeValue.ZERO, - singletonMap(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) + Map.of(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) ), null, null @@ -710,12 +709,12 @@ public void testResumingSearchableSnapshotFromPartialToFull() throws Exception { new Phase( "cold", TimeValue.ZERO, - singletonMap(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) + Map.of(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) ), new Phase( "frozen", TimeValue.ZERO, - singletonMap(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) + Map.of(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) ), null ); @@ -775,10 +774,10 @@ public void testResumingSearchableSnapshotFromPartialToFull() throws Exception { new Phase( "cold", TimeValue.ZERO, - singletonMap(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) + Map.of(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) ), null, - new Phase("delete", TimeValue.ZERO, singletonMap(DeleteAction.NAME, WITH_SNAPSHOT_DELETE)) + new Phase("delete", TimeValue.ZERO, Map.of(DeleteAction.NAME, WITH_SNAPSHOT_DELETE)) ); assertBusy(() -> { logger.info("--> waiting for [{}] to be deleted...", restoredPartiallyMountedIndexName); @@ -803,12 +802,12 @@ public void testSecondSearchableSnapshotUsingDifferentRepoThrows() throws Except new Phase( "cold", TimeValue.ZERO, - singletonMap(SearchableSnapshotAction.NAME, new 
SearchableSnapshotAction(snapshotRepo, randomBoolean())) + Map.of(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) ), new Phase( "frozen", TimeValue.ZERO, - singletonMap(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(secondRepo, randomBoolean())) + Map.of(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(secondRepo, randomBoolean())) ), null ) @@ -934,12 +933,12 @@ public void testSearchableSnapshotTotalShardsPerNode() throws Exception { new Phase( "cold", TimeValue.ZERO, - singletonMap(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) + Map.of(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) ), new Phase( "frozen", TimeValue.ZERO, - singletonMap(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean(), totalShardsPerNode)) + Map.of(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean(), totalShardsPerNode)) ), null ); diff --git a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/ShrinkActionIT.java b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/ShrinkActionIT.java index d2f2dbbd0c9fb..2fecf3c617ccd 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/ShrinkActionIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/ShrinkActionIT.java @@ -39,7 +39,6 @@ import java.util.Map; import java.util.concurrent.TimeUnit; -import static java.util.Collections.singletonMap; import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.TimeSeriesRestDriver.createIndexWithSettings; @@ -286,7 +285,7 @@ public void testSetSingleNodeAllocationRetriesUntilItSucceeds() throws Exception TimeValue.ZERO, Map.of(migrateAction.getWriteableName(), migrateAction, shrinkAction.getWriteableName(), shrinkAction) ); - LifecyclePolicy lifecyclePolicy = new LifecyclePolicy(policy, singletonMap(phase.getName(), phase)); + LifecyclePolicy lifecyclePolicy = new LifecyclePolicy(policy, Map.of(phase.getName(), phase)); XContentBuilder builder = jsonBuilder(); lifecyclePolicy.toXContent(builder, null); final StringEntity entity = new StringEntity("{ \"policy\":" + Strings.toString(builder) + "}", ContentType.APPLICATION_JSON); diff --git a/x-pack/plugin/ilm/qa/with-security/src/javaRestTest/java/org/elasticsearch/xpack/security/PermissionsIT.java b/x-pack/plugin/ilm/qa/with-security/src/javaRestTest/java/org/elasticsearch/xpack/security/PermissionsIT.java index 9460500177616..12dede7067b03 100644 --- a/x-pack/plugin/ilm/qa/with-security/src/javaRestTest/java/org/elasticsearch/xpack/security/PermissionsIT.java +++ b/x-pack/plugin/ilm/qa/with-security/src/javaRestTest/java/org/elasticsearch/xpack/security/PermissionsIT.java @@ -45,7 +45,6 @@ import java.util.Map; import java.util.concurrent.TimeUnit; -import static java.util.Collections.singletonMap; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -225,8 +224,8 @@ public void testWhenUserLimitedByOnlyAliasOfIndexCanWriteToIndexWhichWasRolledov } private void createNewSingletonPolicy(RestClient client, String policy, String 
phaseName, LifecycleAction action) throws IOException { - Phase phase = new Phase(phaseName, TimeValue.ZERO, singletonMap(action.getWriteableName(), action)); - LifecyclePolicy lifecyclePolicy = new LifecyclePolicy(policy, singletonMap(phase.getName(), phase)); + Phase phase = new Phase(phaseName, TimeValue.ZERO, Map.of(action.getWriteableName(), action)); + LifecyclePolicy lifecyclePolicy = new LifecyclePolicy(policy, Map.of(phase.getName(), phase)); XContentBuilder builder = jsonBuilder(); lifecyclePolicy.toXContent(builder, null); final StringEntity entity = new StringEntity("{ \"policy\":" + Strings.toString(builder) + "}", ContentType.APPLICATION_JSON); diff --git a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ClusterStateWaitThresholdBreachTests.java b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ClusterStateWaitThresholdBreachTests.java index 55daa8104c12a..f25028824b56e 100644 --- a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ClusterStateWaitThresholdBreachTests.java +++ b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ClusterStateWaitThresholdBreachTests.java @@ -35,7 +35,6 @@ import org.elasticsearch.xpack.core.ilm.action.PutLifecycleRequest; import org.junit.Before; -import java.util.Arrays; import java.util.Collection; import java.util.List; import java.util.Locale; @@ -65,7 +64,7 @@ public void refreshDataStreamAndPolicy() { @Override protected Collection> nodePlugins() { - return Arrays.asList(LocalStateCompositeXPackPlugin.class, IndexLifecycle.class, Ccr.class); + return List.of(LocalStateCompositeXPackPlugin.class, IndexLifecycle.class, Ccr.class); } @Override diff --git a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/DataTiersMigrationsTests.java b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/DataTiersMigrationsTests.java index 7a0e00e5c4147..6d409bf474cfc 100644 --- a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/DataTiersMigrationsTests.java +++ b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/DataTiersMigrationsTests.java @@ -30,9 +30,8 @@ import org.elasticsearch.xpack.core.ilm.action.PutLifecycleRequest; import org.junit.Before; -import java.util.Arrays; import java.util.Collection; -import java.util.Collections; +import java.util.List; import java.util.Locale; import java.util.Map; import java.util.concurrent.TimeUnit; @@ -57,7 +56,7 @@ public void refreshDataStreamAndPolicy() { @Override protected Collection> nodePlugins() { - return Arrays.asList(LocalStateCompositeXPackPlugin.class, IndexLifecycle.class); + return List.of(LocalStateCompositeXPackPlugin.class, IndexLifecycle.class); } @Override @@ -100,9 +99,9 @@ public void testIndexDataTierMigration() throws Exception { logger.info("starting a cold data node"); internalCluster().startNode(coldNode(Settings.EMPTY)); - Phase hotPhase = new Phase("hot", TimeValue.ZERO, Collections.emptyMap()); - Phase warmPhase = new Phase("warm", TimeValue.ZERO, Collections.emptyMap()); - Phase coldPhase = new Phase("cold", TimeValue.ZERO, Collections.emptyMap()); + Phase hotPhase = new Phase("hot", TimeValue.ZERO, Map.of()); + Phase warmPhase = new Phase("warm", TimeValue.ZERO, Map.of()); + Phase coldPhase = new Phase("cold", TimeValue.ZERO, Map.of()); LifecyclePolicy lifecyclePolicy = new LifecyclePolicy(policy, Map.of("hot", hotPhase, "warm", warmPhase, "cold", coldPhase)); PutLifecycleRequest 
putLifecycleRequest = new PutLifecycleRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, lifecyclePolicy); assertAcked(client().execute(ILMActions.PUT, putLifecycleRequest).get()); @@ -161,9 +160,9 @@ public void testUserOptsOutOfTierMigration() throws Exception { logger.info("starting a cold data node"); internalCluster().startNode(coldNode(Settings.EMPTY)); - Phase hotPhase = new Phase("hot", TimeValue.ZERO, Collections.emptyMap()); - Phase warmPhase = new Phase("warm", TimeValue.ZERO, Collections.emptyMap()); - Phase coldPhase = new Phase("cold", TimeValue.ZERO, Collections.emptyMap()); + Phase hotPhase = new Phase("hot", TimeValue.ZERO, Map.of()); + Phase warmPhase = new Phase("warm", TimeValue.ZERO, Map.of()); + Phase coldPhase = new Phase("cold", TimeValue.ZERO, Map.of()); LifecyclePolicy lifecyclePolicy = new LifecyclePolicy(policy, Map.of("hot", hotPhase, "warm", warmPhase, "cold", coldPhase)); PutLifecycleRequest putLifecycleRequest = new PutLifecycleRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, lifecyclePolicy); assertAcked(client().execute(ILMActions.PUT, putLifecycleRequest).get()); diff --git a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ILMMultiNodeIT.java b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ILMMultiNodeIT.java index b443c769407c5..2c4c1c9e20bb6 100644 --- a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ILMMultiNodeIT.java +++ b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ILMMultiNodeIT.java @@ -33,10 +33,9 @@ import org.elasticsearch.xpack.core.ilm.action.ILMActions; import org.elasticsearch.xpack.core.ilm.action.PutLifecycleRequest; -import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; @@ -49,7 +48,7 @@ public class ILMMultiNodeIT extends ESIntegTestCase { @Override protected Collection<Class<? extends Plugin>> nodePlugins() { - return Arrays.asList(LocalStateCompositeXPackPlugin.class, DataStreamsPlugin.class, IndexLifecycle.class, Ccr.class); + return List.of(LocalStateCompositeXPackPlugin.class, DataStreamsPlugin.class, IndexLifecycle.class, Ccr.class); } @Override @@ -69,9 +68,9 @@ public void testShrinkOnTiers() throws Exception { ensureGreen(); RolloverAction rolloverAction = new RolloverAction(null, null, null, 1L, null, null, null, null, null, null); - Phase hotPhase = new Phase("hot", TimeValue.ZERO, Collections.singletonMap(rolloverAction.getWriteableName(), rolloverAction)); + Phase hotPhase = new Phase("hot", TimeValue.ZERO, Map.of(rolloverAction.getWriteableName(), rolloverAction)); ShrinkAction shrinkAction = new ShrinkAction(1, null, false); - Phase warmPhase = new Phase("warm", TimeValue.ZERO, Collections.singletonMap(shrinkAction.getWriteableName(), shrinkAction)); + Phase warmPhase = new Phase("warm", TimeValue.ZERO, Map.of(shrinkAction.getWriteableName(), shrinkAction)); Map<String, Phase> phases = new HashMap<>(); phases.put(hotPhase.getName(), hotPhase); phases.put(warmPhase.getName(), warmPhase); @@ -89,7 +88,7 @@ public void testShrinkOnTiers() throws Exception { ); ComposableIndexTemplate template = ComposableIndexTemplate.builder() - .indexPatterns(Collections.singletonList(index)) + .indexPatterns(List.of(index)) .template(t) .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) .build(); @@ -121,12 +120,12 @@ public void testShrinkOnTiers() throws Exception { } 
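Aside on the Collections.emptyMap()/Collections.singletonMap(k, v) to Map.of() migration applied throughout these hunks: for the arguments used here (non-null keys and values, no duplicate keys) the swap is behavior-preserving, since both the old and new factories return unmodifiable maps with content-based equality, so Phase and LifecyclePolicy see no difference. A minimal, self-contained sketch of that equivalence (the class name and placeholder entries are illustrative, not taken from the Elasticsearch sources):

import java.util.Collections;
import java.util.Map;

public class MapFactoryEquivalenceSketch {
    public static void main(String[] args) {
        // Both factories yield unmodifiable maps that compare equal by content.
        Map<String, Integer> legacy = Collections.singletonMap("hot", 1);
        Map<String, Integer> modern = Map.of("hot", 1);
        System.out.println(legacy.equals(modern));                   // true
        System.out.println(Map.of().equals(Collections.emptyMap())); // true for the empty case as well
    }
}

public void startHotOnlyNode() { 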
- Settings nodeSettings = Settings.builder().putList("node.roles", Arrays.asList("master", "data_hot", "ingest")).build(); + Settings nodeSettings = Settings.builder().putList("node.roles", List.of("master", "data_hot", "ingest")).build(); internalCluster().startNode(nodeSettings); } public void startWarmOnlyNode() { - Settings nodeSettings = Settings.builder().putList("node.roles", Arrays.asList("master", "data_warm", "ingest")).build(); + Settings nodeSettings = Settings.builder().putList("node.roles", List.of("master", "data_warm", "ingest")).build(); internalCluster().startNode(nodeSettings); } } diff --git a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ILMMultiNodeWithCCRDisabledIT.java b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ILMMultiNodeWithCCRDisabledIT.java index e02dd5fe45676..b91a309a23ae5 100644 --- a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ILMMultiNodeWithCCRDisabledIT.java +++ b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ILMMultiNodeWithCCRDisabledIT.java @@ -34,10 +34,9 @@ import org.elasticsearch.xpack.core.ilm.action.ILMActions; import org.elasticsearch.xpack.core.ilm.action.PutLifecycleRequest; -import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; @@ -50,7 +49,7 @@ public class ILMMultiNodeWithCCRDisabledIT extends ESIntegTestCase { @Override protected Collection<Class<? extends Plugin>> nodePlugins() { - return Arrays.asList(LocalStateCompositeXPackPlugin.class, DataStreamsPlugin.class, IndexLifecycle.class, Ccr.class); + return List.of(LocalStateCompositeXPackPlugin.class, DataStreamsPlugin.class, IndexLifecycle.class, Ccr.class); } @Override @@ -75,7 +74,7 @@ public void testShrinkOnTiers() throws Exception { actions.put(shrinkAction.getWriteableName(), shrinkAction); Phase hotPhase = new Phase("hot", TimeValue.ZERO, actions); - LifecyclePolicy lifecyclePolicy = new LifecyclePolicy("shrink-policy", Collections.singletonMap(hotPhase.getName(), hotPhase)); + LifecyclePolicy lifecyclePolicy = new LifecyclePolicy("shrink-policy", Map.of(hotPhase.getName(), hotPhase)); client().execute(ILMActions.PUT, new PutLifecycleRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, lifecyclePolicy)).get(); Template t = new Template( @@ -89,7 +88,7 @@ public void testShrinkOnTiers() throws Exception { ); ComposableIndexTemplate template = ComposableIndexTemplate.builder() - .indexPatterns(Collections.singletonList(index)) + .indexPatterns(List.of(index)) .template(t) .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) .build(); @@ -121,12 +120,12 @@ public void testShrinkOnTiers() throws Exception { } public void startHotOnlyNode() { - Settings nodeSettings = Settings.builder().putList("node.roles", Arrays.asList("master", "data_hot", "ingest")).build(); + Settings nodeSettings = Settings.builder().putList("node.roles", List.of("master", "data_hot", "ingest")).build(); internalCluster().startNode(nodeSettings); } public void startWarmOnlyNode() { - Settings nodeSettings = Settings.builder().putList("node.roles", Arrays.asList("master", "data_warm", "ingest")).build(); + Settings nodeSettings = Settings.builder().putList("node.roles", List.of("master", "data_warm", "ingest")).build(); internalCluster().startNode(nodeSettings); } } 
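Aside on Arrays.asList versus List.of, which the node.roles and nodePlugins changes above rely on: Arrays.asList returns a fixed-size, array-backed view that still permits set() and null elements, while List.of is fully immutable and rejects nulls; the two compare equal for the same contents, so for read-only, non-null lists like these the replacement is a drop-in. A short illustrative sketch (class name and role values are placeholders, not from the Elasticsearch sources):

import java.util.Arrays;
import java.util.List;

public class ListFactorySketch {
    public static void main(String[] args) {
        List<String> legacy = Arrays.asList("master", "data_hot", "ingest"); // fixed-size view over an array
        List<String> modern = List.of("master", "data_hot", "ingest");       // fully immutable
        System.out.println(legacy.equals(modern)); // true: List equality is content-based
        legacy.set(0, "data_warm");                // permitted on the Arrays.asList view
        // modern.set(0, "data_warm");             // would throw UnsupportedOperationException
        // List.of("master", null);                // would throw NullPointerException
    }
}

diff --git 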
a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/IndexLifecycleInitialisationTests.java b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/IndexLifecycleInitialisationTests.java index d06a9f9cc19b1..644f88dc533b9 100644 --- a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/IndexLifecycleInitialisationTests.java +++ b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/IndexLifecycleInitialisationTests.java @@ -56,9 +56,7 @@ import java.io.IOException; import java.time.Instant; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -112,7 +110,7 @@ protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { @Override protected Collection> nodePlugins() { - return Arrays.asList(LocalStateCompositeXPackPlugin.class, IndexLifecycle.class, TestILMPlugin.class); + return List.of(LocalStateCompositeXPackPlugin.class, IndexLifecycle.class, TestILMPlugin.class); } @Before @@ -128,9 +126,9 @@ public void init() { Step.StepKey compKey = new Step.StepKey("mock", "complete", "complete"); steps.add(new ObservableClusterStateWaitStep(key, compKey)); steps.add(new PhaseCompleteStep(compKey, null)); - Map actions = Collections.singletonMap(ObservableAction.NAME, OBSERVABLE_ACTION); + Map actions = Map.of(ObservableAction.NAME, OBSERVABLE_ACTION); mockPhase = new Phase("mock", TimeValue.timeValueSeconds(0), actions); - Map phases = Collections.singletonMap("mock", mockPhase); + Map phases = Map.of("mock", mockPhase); lifecyclePolicy = newLockableLifecyclePolicy("test", phases); } @@ -311,7 +309,7 @@ public void testExplainExecution() throws Exception { updateIndexSettings(Settings.builder().put("index.lifecycle.test.complete", true), "test"); { - Phase phase = new Phase("mock", TimeValue.ZERO, Collections.singletonMap("TEST_ACTION", OBSERVABLE_ACTION)); + Phase phase = new Phase("mock", TimeValue.ZERO, Map.of("TEST_ACTION", OBSERVABLE_ACTION)); PhaseExecutionInfo expectedExecutionInfo = new PhaseExecutionInfo(lifecyclePolicy.getName(), phase, 1L, actualModifiedDate); assertBusy(() -> { IndexLifecycleExplainResponse indexResponse = executeExplainRequestAndGetTestIndexResponse("test"); @@ -526,12 +524,12 @@ public List> getSettings() { Setting.Property.Dynamic, Setting.Property.IndexScope ); - return Collections.singletonList(COMPLETE_SETTING); + return List.of(COMPLETE_SETTING); } @Override public List getNamedXContent() { - return Arrays.asList(new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ObservableAction.NAME), (p) -> { + return List.of(new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ObservableAction.NAME), (p) -> { MockAction.parse(p); return OBSERVABLE_ACTION; })); @@ -539,7 +537,7 @@ public List getNamedXContent() { @Override public List getNamedWriteables() { - return Arrays.asList( + return List.of( new NamedWriteableRegistry.Entry(LifecycleType.class, LockableLifecycleType.TYPE, (in) -> LockableLifecycleType.INSTANCE), new NamedWriteableRegistry.Entry(LifecycleAction.class, ObservableAction.NAME, ObservableAction::readObservableAction), new NamedWriteableRegistry.Entry( diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/cluster/metadata/MetadataMigrateToDataTiersRoutingService.java 
b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/cluster/metadata/MetadataMigrateToDataTiersRoutingService.java index 9efe46402428c..a36b74d9932d9 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/cluster/metadata/MetadataMigrateToDataTiersRoutingService.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/cluster/metadata/MetadataMigrateToDataTiersRoutingService.java @@ -251,7 +251,7 @@ static List migrateIlmPolicies( ) { IndexLifecycleMetadata currentLifecycleMetadata = currentState.metadata().custom(IndexLifecycleMetadata.TYPE); if (currentLifecycleMetadata == null) { - return Collections.emptyList(); + return List.of(); } List migratedPolicies = new ArrayList<>(); @@ -827,7 +827,6 @@ public MigratedEntities( this.migratedPolicies = Collections.unmodifiableList(migratedPolicies); this.migratedTemplates = migratedTemplates; } - } /** diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IlmHealthIndicatorService.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IlmHealthIndicatorService.java index 42d1955f0d453..c5d367804db42 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IlmHealthIndicatorService.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IlmHealthIndicatorService.java @@ -41,7 +41,6 @@ import org.elasticsearch.xpack.core.ilm.WaitForRolloverReadyStep; import java.util.Collection; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -219,8 +218,8 @@ public HealthIndicatorResult calculate(boolean verbose, int maxAffectedResources GREEN, "No Index Lifecycle Management policies configured", createDetails(verbose, ilmMetadata, currentMode), - Collections.emptyList(), - Collections.emptyList() + List.of(), + List.of() ); } else if (currentMode != OperationMode.RUNNING) { return createIndicator( @@ -238,8 +237,8 @@ public HealthIndicatorResult calculate(boolean verbose, int maxAffectedResources GREEN, "Index Lifecycle Management is running", createDetails(verbose, ilmMetadata, currentMode), - Collections.emptyList(), - Collections.emptyList() + List.of(), + List.of() ); } else { return createIndicator( diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleService.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleService.java index e59bde7253051..71d61caa5fe31 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleService.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleService.java @@ -54,7 +54,6 @@ import java.io.Closeable; import java.time.Clock; import java.util.Collection; -import java.util.Collections; import java.util.Map; import java.util.Set; import java.util.function.LongSupplier; @@ -500,7 +499,7 @@ static Set indicesOnShuttingDownNodesInDangerousStep(ClusterState state, SingleNodeShutdownMetadata.Type.REPLACE ); if (shutdownNodes.isEmpty()) { - return Collections.emptySet(); + return Set.of(); } Set indicesPreventingShutdown = state.metadata() diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportGetLifecycleAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportGetLifecycleAction.java index f4598727d6123..5fa0f881559fb 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportGetLifecycleAction.java +++ 
b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportGetLifecycleAction.java @@ -32,7 +32,6 @@ import java.util.ArrayList; import java.util.Arrays; -import java.util.Collections; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -71,7 +70,7 @@ protected void masterOperation(Task task, Request request, ClusterState state, A IndexLifecycleMetadata metadata = clusterService.state().metadata().custom(IndexLifecycleMetadata.TYPE); if (metadata == null) { if (request.getPolicyNames().length == 0) { - listener.onResponse(new Response(Collections.emptyList())); + listener.onResponse(new Response(List.of())); } else { listener.onFailure( new ResourceNotFoundException("Lifecycle policy not found: {}", Arrays.toString(request.getPolicyNames())) diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/history/ILMHistoryItem.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/history/ILMHistoryItem.java index 977887a0487f3..efd54e05cb153 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/history/ILMHistoryItem.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/history/ILMHistoryItem.java @@ -18,7 +18,7 @@ import org.elasticsearch.xcontent.json.JsonXContent; import java.io.IOException; -import java.util.Collections; +import java.util.Map; import java.util.Objects; import static org.elasticsearch.ElasticsearchException.REST_EXCEPTION_SKIP_STACK_TRACE; @@ -110,7 +110,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } private static String exceptionToString(Exception exception) { - Params stacktraceParams = new MapParams(Collections.singletonMap(REST_EXCEPTION_SKIP_STACK_TRACE, "false")); + Params stacktraceParams = new MapParams(Map.of(REST_EXCEPTION_SKIP_STACK_TRACE, "false")); String exceptionString; try (XContentBuilder causeXContentBuilder = JsonXContent.contentBuilder()) { causeXContentBuilder.startObject(); diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/cluster/metadata/MetadataMigrateToDataTiersRoutingServiceTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/cluster/metadata/MetadataMigrateToDataTiersRoutingServiceTests.java index 570c2f5231acf..2ee133b6292bd 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/cluster/metadata/MetadataMigrateToDataTiersRoutingServiceTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/cluster/metadata/MetadataMigrateToDataTiersRoutingServiceTests.java @@ -48,7 +48,6 @@ import java.io.ByteArrayInputStream; import java.nio.charset.StandardCharsets; -import java.util.Collections; import java.util.List; import java.util.Map; @@ -118,10 +117,7 @@ public void testMigrateIlmPolicyForIndexWithoutILMMetadata() { Metadata.builder() .putCustom( IndexLifecycleMetadata.TYPE, - new IndexLifecycleMetadata( - Collections.singletonMap(policyMetadata.getName(), policyMetadata), - OperationMode.STOPPED - ) + new IndexLifecycleMetadata(Map.of(policyMetadata.getName(), policyMetadata), OperationMode.STOPPED) ) .put(IndexMetadata.builder(indexName).settings(getBaseIndexSettings())) .build() @@ -176,7 +172,7 @@ public void testMigrateIlmPolicyForPhaseWithDeactivatedMigrateAction() { ); LifecyclePolicyMetadata policyMetadata = new LifecyclePolicyMetadata( policy, - Collections.emptyMap(), + Map.of(), randomNonNegativeLong(), randomNonNegativeLong() ); @@ -186,10 +182,7 @@ public void testMigrateIlmPolicyForPhaseWithDeactivatedMigrateAction() { 
Metadata.builder() .putCustom( IndexLifecycleMetadata.TYPE, - new IndexLifecycleMetadata( - Collections.singletonMap(policyMetadata.getName(), policyMetadata), - OperationMode.STOPPED - ) + new IndexLifecycleMetadata(Map.of(policyMetadata.getName(), policyMetadata), OperationMode.STOPPED) ) .put(IndexMetadata.builder(indexName).settings(getBaseIndexSettings())) .build() @@ -245,10 +238,7 @@ public void testMigrateIlmPolicyRefreshesCachedPhase() { Metadata.builder() .putCustom( IndexLifecycleMetadata.TYPE, - new IndexLifecycleMetadata( - Collections.singletonMap(policyMetadata.getName(), policyMetadata), - OperationMode.STOPPED - ) + new IndexLifecycleMetadata(Map.of(policyMetadata.getName(), policyMetadata), OperationMode.STOPPED) ) .put(indexMetadata) .build() @@ -302,10 +292,7 @@ public void testMigrateIlmPolicyRefreshesCachedPhase() { .putCustom( IndexLifecycleMetadata.TYPE, new IndexLifecycleMetadata( - Collections.singletonMap( - policyMetadataWithTotalShardsPerNode.getName(), - policyMetadataWithTotalShardsPerNode - ), + Map.of(policyMetadataWithTotalShardsPerNode.getName(), policyMetadataWithTotalShardsPerNode), OperationMode.STOPPED ) ) @@ -352,10 +339,7 @@ public void testMigrateIlmPolicyRefreshesCachedPhase() { Metadata.builder() .putCustom( IndexLifecycleMetadata.TYPE, - new IndexLifecycleMetadata( - Collections.singletonMap(policyMetadata.getName(), policyMetadata), - OperationMode.STOPPED - ) + new IndexLifecycleMetadata(Map.of(policyMetadata.getName(), policyMetadata), OperationMode.STOPPED) ) .put(indexMetadata) .build() @@ -406,10 +390,7 @@ public void testMigrateIlmPolicyRefreshesCachedPhase() { Metadata.builder() .putCustom( IndexLifecycleMetadata.TYPE, - new IndexLifecycleMetadata( - Collections.singletonMap(policyMetadata.getName(), policyMetadata), - OperationMode.STOPPED - ) + new IndexLifecycleMetadata(Map.of(policyMetadata.getName(), policyMetadata), OperationMode.STOPPED) ) .put(indexMetadata) .build() @@ -456,10 +437,7 @@ public void testMigrateIlmPolicyRefreshesCachedPhase() { Metadata.builder() .putCustom( IndexLifecycleMetadata.TYPE, - new IndexLifecycleMetadata( - Collections.singletonMap(policyMetadata.getName(), policyMetadata), - OperationMode.STOPPED - ) + new IndexLifecycleMetadata(Map.of(policyMetadata.getName(), policyMetadata), OperationMode.STOPPED) ) .put(indexMetadata) .build() @@ -1008,7 +986,7 @@ public void testMigrateToDataTiersRouting() { ); LifecyclePolicyMetadata policyWithDataAttribute = new LifecyclePolicyMetadata( policyToMigrate, - Collections.emptyMap(), + Map.of(), randomNonNegativeLong(), randomNonNegativeLong() ); @@ -1026,7 +1004,7 @@ public void testMigrateToDataTiersRouting() { ); LifecyclePolicyMetadata policyWithOtherAttribute = new LifecyclePolicyMetadata( shouldntBeMigratedPolicy, - Collections.emptyMap(), + Map.of(), randomNonNegativeLong(), randomNonNegativeLong() ); @@ -1215,7 +1193,7 @@ public void testDryRunDoesntRequireILMStopped() { public void testMigrationDoesNotRemoveComposableTemplates() { ComposableIndexTemplate composableIndexTemplate = ComposableIndexTemplate.builder() - .indexPatterns(Collections.singletonList("*")) + .indexPatterns(List.of("*")) .template(new Template(Settings.builder().put(DATA_ROUTING_REQUIRE_SETTING, "hot").build(), null, null)) .build(); @@ -1285,7 +1263,7 @@ private LifecyclePolicyMetadata getWarmColdPolicyMeta( new Phase("cold", TimeValue.ZERO, Map.of(coldAllocateAction.getWriteableName(), coldAllocateAction)) ) ); - return new LifecyclePolicyMetadata(policy, Collections.emptyMap(), 
randomNonNegativeLong(), randomNonNegativeLong()); + return new LifecyclePolicyMetadata(policy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong()); } public void testMigrateLegacyIndexTemplates() { diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/ExecuteStepsUpdateTaskTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/ExecuteStepsUpdateTaskTests.java index b3146e81d08fc..06d11bff069fd 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/ExecuteStepsUpdateTaskTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/ExecuteStepsUpdateTaskTests.java @@ -42,9 +42,8 @@ import org.mockito.Mockito; import java.io.IOException; -import java.util.Arrays; -import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; import static org.elasticsearch.cluster.metadata.LifecycleExecutionState.ILM_CUSTOM_METADATA_KEY; @@ -91,42 +90,33 @@ public void prepareState() throws IOException { Phase mixedPhase = new Phase( "first_phase", TimeValue.ZERO, - Collections.singletonMap(MockAction.NAME, new MockAction(Arrays.asList(firstStep, secondStep, thirdStep))) + Map.of(MockAction.NAME, new MockAction(List.of(firstStep, secondStep, thirdStep))) ); Phase allClusterPhase = new Phase( "first_phase", TimeValue.ZERO, - Collections.singletonMap(MockAction.NAME, new MockAction(Arrays.asList(firstStep, allClusterSecondStep))) + Map.of(MockAction.NAME, new MockAction(List.of(firstStep, allClusterSecondStep))) ); Phase invalidPhase = new Phase( "invalid_phase", TimeValue.ZERO, - Collections.singletonMap( - MockAction.NAME, - new MockAction(Arrays.asList(new MockClusterStateActionStep(firstStepKey, invalidStepKey))) - ) - ); - LifecyclePolicy mixedPolicy = newTestLifecyclePolicy(mixedPolicyName, Collections.singletonMap(mixedPhase.getName(), mixedPhase)); - LifecyclePolicy allClusterPolicy = newTestLifecyclePolicy( - allClusterPolicyName, - Collections.singletonMap(allClusterPhase.getName(), allClusterPhase) - ); - LifecyclePolicy invalidPolicy = newTestLifecyclePolicy( - invalidPolicyName, - Collections.singletonMap(invalidPhase.getName(), invalidPhase) + Map.of(MockAction.NAME, new MockAction(List.of(new MockClusterStateActionStep(firstStepKey, invalidStepKey)))) ); + LifecyclePolicy mixedPolicy = newTestLifecyclePolicy(mixedPolicyName, Map.of(mixedPhase.getName(), mixedPhase)); + LifecyclePolicy allClusterPolicy = newTestLifecyclePolicy(allClusterPolicyName, Map.of(allClusterPhase.getName(), allClusterPhase)); + LifecyclePolicy invalidPolicy = newTestLifecyclePolicy(invalidPolicyName, Map.of(invalidPhase.getName(), invalidPhase)); Map policyMap = new HashMap<>(); policyMap.put( mixedPolicyName, - new LifecyclePolicyMetadata(mixedPolicy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong()) + new LifecyclePolicyMetadata(mixedPolicy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong()) ); policyMap.put( allClusterPolicyName, - new LifecyclePolicyMetadata(allClusterPolicy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong()) + new LifecyclePolicyMetadata(allClusterPolicy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong()) ); policyMap.put( invalidPolicyName, - new LifecyclePolicyMetadata(invalidPolicy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong()) + new LifecyclePolicyMetadata(invalidPolicy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong()) ); policyStepsRegistry = new 
PolicyStepsRegistry(NamedXContentRegistry.EMPTY, client, null); diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IlmHealthIndicatorServiceTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IlmHealthIndicatorServiceTests.java index 9e2a67caac253..7a37aaba96c18 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IlmHealthIndicatorServiceTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IlmHealthIndicatorServiceTests.java @@ -36,7 +36,6 @@ import org.elasticsearch.xpack.core.ilm.LifecycleSettings; import java.io.IOException; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -177,7 +176,7 @@ public void testIsYellowWhenNotRunningAndPoliciesConfigured() { YELLOW, "Index Lifecycle Management is not running", new SimpleHealthIndicatorDetails(Map.of("ilm_status", status, "policies", 1, "stagnating_indices", 0)), - Collections.singletonList( + List.of( new HealthIndicatorImpact( NAME, IlmHealthIndicatorService.AUTOMATION_DISABLED_IMPACT_ID, @@ -251,7 +250,7 @@ public void testSkippingFieldsWhenVerboseIsFalse() { YELLOW, "Index Lifecycle Management is not running", HealthIndicatorDetails.EMPTY, - Collections.singletonList( + List.of( new HealthIndicatorImpact( NAME, IlmHealthIndicatorService.AUTOMATION_DISABLED_IMPACT_ID, diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleInfoTransportActionTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleInfoTransportActionTests.java index d81faf6a398d7..4e8d7440eb773 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleInfoTransportActionTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleInfoTransportActionTests.java @@ -33,7 +33,6 @@ import org.mockito.Mockito; import java.util.ArrayList; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -75,18 +74,18 @@ public void testUsageStats() throws Exception { indexPolicies.put("index_3", policy1Name); indexPolicies.put("index_4", policy1Name); indexPolicies.put("index_5", policy3Name); - LifecyclePolicy policy1 = new LifecyclePolicy(policy1Name, Collections.emptyMap()); + LifecyclePolicy policy1 = new LifecyclePolicy(policy1Name, Map.of()); policies.add(policy1); - PolicyStats policy1Stats = new PolicyStats(Collections.emptyMap(), 4); + PolicyStats policy1Stats = new PolicyStats(Map.of(), 4); Map phases1 = new HashMap<>(); LifecyclePolicy policy2 = new LifecyclePolicy(policy2Name, phases1); policies.add(policy2); - PolicyStats policy2Stats = new PolicyStats(Collections.emptyMap(), 0); + PolicyStats policy2Stats = new PolicyStats(Map.of(), 0); - LifecyclePolicy policy3 = new LifecyclePolicy(policy3Name, Collections.emptyMap()); + LifecyclePolicy policy3 = new LifecyclePolicy(policy3Name, Map.of()); policies.add(policy3); - PolicyStats policy3Stats = new PolicyStats(Collections.emptyMap(), 1); + PolicyStats policy3Stats = new PolicyStats(Map.of(), 1); ClusterState clusterState = buildClusterState(policies, indexPolicies); Mockito.when(clusterService.state()).thenReturn(clusterState); @@ -110,7 +109,7 @@ public void testUsageStats() throws Exception { private ClusterState buildClusterState(List lifecyclePolicies, Map indexPolicies) { Map lifecyclePolicyMetadatasMap = lifecyclePolicies.stream() - .map(p -> new LifecyclePolicyMetadata(p, Collections.emptyMap(), 1, 0L)) + .map(p -> 
new LifecyclePolicyMetadata(p, Map.of(), 1, 0L)) .collect(Collectors.toMap(LifecyclePolicyMetadata::getName, Function.identity())); IndexLifecycleMetadata indexLifecycleMetadata = new IndexLifecycleMetadata(lifecyclePolicyMetadatasMap, OperationMode.RUNNING); diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleMetadataTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleMetadataTests.java index e757488c2690e..ece83fe6bc437 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleMetadataTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleMetadataTests.java @@ -44,8 +44,6 @@ import org.elasticsearch.xpack.core.ilm.WaitForSnapshotAction; import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.SortedMap; @@ -63,10 +61,7 @@ protected IndexLifecycleMetadata createTestInstance() { Map<String, LifecyclePolicyMetadata> policies = Maps.newMapWithExpectedSize(numPolicies); for (int i = 0; i < numPolicies; i++) { LifecyclePolicy policy = randomTimeseriesLifecyclePolicy(randomAlphaOfLength(4) + i); - policies.put( - policy.getName(), - new LifecyclePolicyMetadata(policy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong()) - ); + policies.put(policy.getName(), new LifecyclePolicyMetadata(policy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong())); } return new IndexLifecycleMetadata(policies, randomFrom(OperationMode.values())); } @@ -84,7 +79,7 @@ protected Reader instanceReader() { @Override protected NamedWriteableRegistry getNamedWriteableRegistry() { return new NamedWriteableRegistry( - Arrays.asList( + List.of( new NamedWriteableRegistry.Entry( LifecycleType.class, TimeseriesLifecycleType.TYPE, @@ -111,7 +106,7 @@ protected NamedXContentRegistry xContentRegistry() { List<NamedXContentRegistry.Entry> entries = new ArrayList<>(ClusterModule.getNamedXWriteables()); entries.addAll( - Arrays.asList( + List.of( new NamedXContentRegistry.Entry( LifecycleType.class, new ParseField(TimeseriesLifecycleType.TYPE), @@ -155,7 +150,7 @@ protected Metadata.Custom mutateInstance(Custom instance) { policyName, new LifecyclePolicyMetadata( randomTimeseriesLifecyclePolicy(policyName), - Collections.emptyMap(), + Map.of(), randomNonNegativeLong(), randomNonNegativeLong() ) @@ -192,9 +187,9 @@ public static IndexLifecycleMetadata createTestInstance(int numPolicies, Operati Map<String, Phase> phases = Maps.newMapWithExpectedSize(numberPhases); for (int j = 0; j < numberPhases; j++) { TimeValue after = randomTimeValue(0, 1_000_000_000, TimeUnit.SECONDS, TimeUnit.MINUTES, TimeUnit.HOURS, TimeUnit.DAYS); - Map<String, LifecycleAction> actions = Collections.emptyMap(); + Map<String, LifecycleAction> actions = Map.of(); if (randomBoolean()) { - actions = Collections.singletonMap(DeleteAction.NAME, DeleteAction.WITH_SNAPSHOT_DELETE); + actions = Map.of(DeleteAction.NAME, DeleteAction.WITH_SNAPSHOT_DELETE); } String phaseName = randomAlphaOfLength(10); phases.put(phaseName, new Phase(phaseName, after, actions)); } @@ -204,7 +199,7 @@ public static IndexLifecycleMetadata createTestInstance(int numPolicies, Operati policyName, new LifecyclePolicyMetadata( newTestLifecyclePolicy(policyName, phases), - Collections.emptyMap(), + Map.of(), randomNonNegativeLong(), randomNonNegativeLong() ) 
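Aside on the two preconditions Map.of adds over Collections.singletonMap, which matter in randomized tests like the ones above: Map.of throws NullPointerException for a null key or value (singletonMap accepted a null value) and IllegalArgumentException for duplicate keys. The action and phase names used in these tests are distinct non-null strings, so the migration is safe. A small sketch of both failure modes (class name is a placeholder, not from the Elasticsearch sources):

import java.util.Map;

public class MapOfPreconditionsSketch {
    public static void main(String[] args) {
        try {
            Map.of("delete", null); // Collections.singletonMap("delete", null) would have been accepted
        } catch (NullPointerException expected) {
            System.out.println("Map.of rejects null values");
        }
        try {
            Map.of("hot", 1, "hot", 2); // same key twice
        } catch (IllegalArgumentException expected) {
            System.out.println("Map.of rejects duplicate keys");
        }
    }
}

diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleRunnerTests.java 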
b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleRunnerTests.java index 8a4859fcd8b77..374f10b604f18 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleRunnerTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleRunnerTests.java @@ -73,8 +73,6 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -89,7 +87,6 @@ import java.util.concurrent.atomic.AtomicLong; import java.util.function.BiFunction; -import static java.util.stream.Collectors.toList; import static org.elasticsearch.cluster.metadata.LifecycleExecutionState.ILM_CUSTOM_METADATA_KEY; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.awaitLatch; import static org.elasticsearch.xpack.core.ilm.LifecycleSettings.LIFECYCLE_HISTORY_INDEX_ENABLED_SETTING; @@ -248,7 +245,7 @@ public void testRunPolicyErrorStepOnRetryableFailedStep() { List waitForRolloverStepList = action.toSteps(client, phaseName, null) .stream() .filter(s -> s.getKey().name().equals(WaitForRolloverReadyStep.NAME)) - .collect(toList()); + .toList(); assertThat(waitForRolloverStepList.size(), is(1)); Step waitForRolloverStep = waitForRolloverStepList.get(0); StepKey stepKey = waitForRolloverStep.getKey(); @@ -288,7 +285,7 @@ public void testRunStateChangePolicyWithNoNextStep() throws Exception { .build(); ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool); DiscoveryNode node = clusterService.localNode(); - IndexLifecycleMetadata ilm = new IndexLifecycleMetadata(Collections.emptyMap(), OperationMode.RUNNING); + IndexLifecycleMetadata ilm = new IndexLifecycleMetadata(Map.of(), OperationMode.RUNNING); ClusterState state = ClusterState.builder(new ClusterName("cluster")) .metadata(Metadata.builder().put(indexMetadata, true).putCustom(IndexLifecycleMetadata.TYPE, ilm)) .nodes(DiscoveryNodes.builder().add(node).masterNodeId(node.getId()).localNodeId(node.getId())) @@ -317,7 +314,7 @@ public void testRunStateChangePolicyWithNextStep() throws Exception { StepKey nextStepKey = new StepKey("phase", "action", "next_cluster_state_action_step"); MockClusterStateActionStep step = new MockClusterStateActionStep(stepKey, nextStepKey); MockClusterStateActionStep nextStep = new MockClusterStateActionStep(nextStepKey, null); - MockPolicyStepsRegistry stepRegistry = createMultiStepPolicyStepRegistry(policyName, Arrays.asList(step, nextStep)); + MockPolicyStepsRegistry stepRegistry = createMultiStepPolicyStepRegistry(policyName, List.of(step, nextStep)); stepRegistry.setResolver((i, k) -> { if (stepKey.equals(k)) { return step; @@ -340,7 +337,7 @@ public void testRunStateChangePolicyWithNextStep() throws Exception { .build(); ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool); DiscoveryNode node = clusterService.localNode(); - IndexLifecycleMetadata ilm = new IndexLifecycleMetadata(Collections.emptyMap(), OperationMode.RUNNING); + IndexLifecycleMetadata ilm = new IndexLifecycleMetadata(Map.of(), OperationMode.RUNNING); ClusterState state = ClusterState.builder(new ClusterName("cluster")) .metadata(Metadata.builder().put(indexMetadata, true).putCustom(IndexLifecycleMetadata.TYPE, ilm)) .nodes(DiscoveryNodes.builder().add(node).masterNodeId(node.getId()).localNodeId(node.getId())) @@ -427,7 +424,7 @@ public void doTestRunPolicyWithFailureToReadPolicy(boolean asyncAction, boolean 
.build(); ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool); DiscoveryNode node = clusterService.localNode(); - IndexLifecycleMetadata ilm = new IndexLifecycleMetadata(Collections.emptyMap(), OperationMode.RUNNING); + IndexLifecycleMetadata ilm = new IndexLifecycleMetadata(Map.of(), OperationMode.RUNNING); ClusterState state = ClusterState.builder(new ClusterName("cluster")) .metadata(Metadata.builder().put(indexMetadata, true).putCustom(IndexLifecycleMetadata.TYPE, ilm)) .nodes(DiscoveryNodes.builder().add(node).masterNodeId(node.getId()).localNodeId(node.getId())) @@ -476,7 +473,7 @@ public void testRunAsyncActionDoesNotRun() { .build(); ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool); DiscoveryNode node = clusterService.localNode(); - IndexLifecycleMetadata ilm = new IndexLifecycleMetadata(Collections.emptyMap(), OperationMode.RUNNING); + IndexLifecycleMetadata ilm = new IndexLifecycleMetadata(Map.of(), OperationMode.RUNNING); ClusterState state = ClusterState.builder(new ClusterName("cluster")) .metadata(Metadata.builder().put(indexMetadata, true).putCustom(IndexLifecycleMetadata.TYPE, ilm)) .nodes(DiscoveryNodes.builder().add(node).masterNodeId(node.getId()).localNodeId(node.getId())) @@ -503,7 +500,7 @@ public void testRunStateChangePolicyWithAsyncActionNextStep() throws Exception { StepKey nextStepKey = new StepKey("phase", "action", "async_action_step"); MockClusterStateActionStep step = new MockClusterStateActionStep(stepKey, nextStepKey); MockAsyncActionStep nextStep = new MockAsyncActionStep(nextStepKey, null); - MockPolicyStepsRegistry stepRegistry = createMultiStepPolicyStepRegistry(policyName, Arrays.asList(step, nextStep)); + MockPolicyStepsRegistry stepRegistry = createMultiStepPolicyStepRegistry(policyName, List.of(step, nextStep)); stepRegistry.setResolver((i, k) -> { if (stepKey.equals(k)) { return step; @@ -526,7 +523,7 @@ public void testRunStateChangePolicyWithAsyncActionNextStep() throws Exception { .build(); ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool); DiscoveryNode node = clusterService.localNode(); - IndexLifecycleMetadata ilm = new IndexLifecycleMetadata(Collections.emptyMap(), OperationMode.RUNNING); + IndexLifecycleMetadata ilm = new IndexLifecycleMetadata(Map.of(), OperationMode.RUNNING); ClusterState state = ClusterState.builder(new ClusterName("cluster")) .metadata(Metadata.builder().put(indexMetadata, true).putCustom(IndexLifecycleMetadata.TYPE, ilm)) .nodes(DiscoveryNodes.builder().add(node).masterNodeId(node.getId()).localNodeId(node.getId())) @@ -603,7 +600,7 @@ public void testRunPeriodicStep() throws Exception { .build(); ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool); DiscoveryNode node = clusterService.localNode(); - IndexLifecycleMetadata ilm = new IndexLifecycleMetadata(Collections.emptyMap(), OperationMode.RUNNING); + IndexLifecycleMetadata ilm = new IndexLifecycleMetadata(Map.of(), OperationMode.RUNNING); ClusterState state = ClusterState.builder(new ClusterName("cluster")) .metadata(Metadata.builder().put(indexMetadata, true).putCustom(IndexLifecycleMetadata.TYPE, ilm)) .nodes(DiscoveryNodes.builder().add(node).masterNodeId(node.getId()).localNodeId(node.getId())) @@ -785,7 +782,7 @@ public void testGetCurrentStep() { Client client = mock(Client.class); when(client.settings()).thenReturn(Settings.EMPTY); LifecyclePolicy policy = 
diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java
index eceb81542377a..b77e643bc2853 100644
--- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java
+++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java
@@ -58,9 +58,9 @@
 import java.time.Clock;
 import java.time.Instant;
 import java.time.ZoneId;
-import java.util.Collections;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 import java.util.SortedMap;
 import java.util.TreeMap;
 import java.util.UUID;
@@ -114,7 +114,7 @@ public void prepareServices() {
         }).when(executorService).execute(any());
         Settings settings = Settings.builder().put(LifecycleSettings.LIFECYCLE_POLL_INTERVAL, "1s").build();
         when(clusterService.getClusterSettings()).thenReturn(
-            new ClusterSettings(settings, Collections.singleton(LifecycleSettings.LIFECYCLE_POLL_INTERVAL_SETTING))
+            new ClusterSettings(settings, Set.of(LifecycleSettings.LIFECYCLE_POLL_INTERVAL_SETTING))
         );
         when(clusterService.lifecycleState()).thenReturn(State.STARTED);
@@ -154,14 +154,11 @@ public void testStoppedModeSkip() {
             randomStepKey(),
             randomStepKey()
         );
-        MockAction mockAction = new MockAction(Collections.singletonList(mockStep));
-        Phase phase = new Phase("phase", TimeValue.ZERO, Collections.singletonMap("action", mockAction));
-        LifecyclePolicy policy = newTestLifecyclePolicy(policyName, Collections.singletonMap(phase.getName(), phase));
+        MockAction mockAction = new MockAction(List.of(mockStep));
+        Phase phase = new Phase("phase", TimeValue.ZERO, Map.of("action", mockAction));
+        LifecyclePolicy policy = newTestLifecyclePolicy(policyName, Map.of(phase.getName(), phase));
         SortedMap<String, LifecyclePolicyMetadata> policyMap = new TreeMap<>();
-        policyMap.put(
-            policyName,
-            new LifecyclePolicyMetadata(policy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong())
-        );
+        policyMap.put(policyName, new LifecyclePolicyMetadata(policy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong()));
         Index index = new Index(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20));
         IndexMetadata indexMetadata = IndexMetadata.builder(index.getName())
             .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_NAME, policyName))
@@ -191,14 +188,11 @@ public void testRequestedStopOnShrink() {
             mockShrinkStep,
             randomStepKey()
         );
-        MockAction mockAction = new MockAction(Collections.singletonList(mockStep));
-        Phase phase = new Phase("phase", TimeValue.ZERO, Collections.singletonMap("action", mockAction));
-        LifecyclePolicy policy = newTestLifecyclePolicy(policyName, Collections.singletonMap(phase.getName(), phase));
+        MockAction mockAction = new MockAction(List.of(mockStep));
+        Phase phase = new Phase("phase", TimeValue.ZERO, Map.of("action", mockAction));
+        LifecyclePolicy policy = newTestLifecyclePolicy(policyName, Map.of(phase.getName(), phase));
         SortedMap<String, LifecyclePolicyMetadata> policyMap = new TreeMap<>();
-        policyMap.put(
-            policyName,
-            new LifecyclePolicyMetadata(policy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong())
-        );
+        policyMap.put(policyName, new LifecyclePolicyMetadata(policy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong()));
         Index index = new Index(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20));
         LifecycleExecutionState.Builder lifecycleState = LifecycleExecutionState.builder();
         lifecycleState.setPhase(mockShrinkStep.phase());
@@ -250,14 +244,11 @@ private void verifyCanStopWithStep(String stoppableStep) {
             mockShrinkStep,
             randomStepKey()
         );
-        MockAction mockAction = new MockAction(Collections.singletonList(mockStep));
-        Phase phase = new Phase("phase", TimeValue.ZERO, Collections.singletonMap("action", mockAction));
-        LifecyclePolicy policy = newTestLifecyclePolicy(policyName, Collections.singletonMap(phase.getName(), phase));
+        MockAction mockAction = new MockAction(List.of(mockStep));
+        Phase phase = new Phase("phase", TimeValue.ZERO, Map.of("action", mockAction));
+        LifecyclePolicy policy = newTestLifecyclePolicy(policyName, Map.of(phase.getName(), phase));
         SortedMap<String, LifecyclePolicyMetadata> policyMap = new TreeMap<>();
-        policyMap.put(
-            policyName,
-            new LifecyclePolicyMetadata(policy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong())
-        );
+        policyMap.put(policyName, new LifecyclePolicyMetadata(policy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong()));
         Index index = new Index(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20));
         LifecycleExecutionState.Builder lifecycleState = LifecycleExecutionState.builder();
         lifecycleState.setPhase(mockShrinkStep.phase());
@@ -301,14 +292,11 @@ public void testRequestedStopOnSafeAction() {
             currentStepKey,
             randomStepKey()
         );
-        MockAction mockAction = new MockAction(Collections.singletonList(mockStep));
-        Phase phase = new Phase("phase", TimeValue.ZERO, Collections.singletonMap("action", mockAction));
-        LifecyclePolicy policy = newTestLifecyclePolicy(policyName, Collections.singletonMap(phase.getName(), phase));
+        MockAction mockAction = new MockAction(List.of(mockStep));
+        Phase phase = new Phase("phase", TimeValue.ZERO, Map.of("action", mockAction));
+        LifecyclePolicy policy = newTestLifecyclePolicy(policyName, Map.of(phase.getName(), phase));
         SortedMap<String, LifecyclePolicyMetadata> policyMap = new TreeMap<>();
-        policyMap.put(
-            policyName,
-            new LifecyclePolicyMetadata(policy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong())
-        );
+        policyMap.put(policyName, new LifecyclePolicyMetadata(policy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong()));
         Index index = new Index(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20));
         LifecycleExecutionState.Builder lifecycleState = LifecycleExecutionState.builder();
         lifecycleState.setPhase(currentStepKey.phase());
@@ -370,9 +358,9 @@ public void doTestExceptionStillProcessesOtherIndices(boolean useOnMaster) {
         } else {
             i1mockStep = new IndexLifecycleRunnerTests.MockClusterStateActionStep(i1currentStepKey, randomStepKey());
         }
-        MockAction i1mockAction = new MockAction(Collections.singletonList(i1mockStep));
-        Phase i1phase = new Phase("phase", TimeValue.ZERO, Collections.singletonMap("action", i1mockAction));
-        LifecyclePolicy i1policy = newTestLifecyclePolicy(policy1, Collections.singletonMap(i1phase.getName(), i1phase));
+        MockAction i1mockAction = new MockAction(List.of(i1mockStep));
+        Phase i1phase = new Phase("phase", TimeValue.ZERO, Map.of("action", i1mockAction));
+        LifecyclePolicy i1policy = newTestLifecyclePolicy(policy1, Map.of(i1phase.getName(), i1phase));
         Index index1 = new Index(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20));
         LifecycleExecutionState.Builder i1lifecycleState = LifecycleExecutionState.builder();
         i1lifecycleState.setPhase(i1currentStepKey.phase());
@@ -387,9 +375,9 @@ public void doTestExceptionStillProcessesOtherIndices(boolean useOnMaster) {
         } else {
             i2mockStep = new IndexLifecycleRunnerTests.MockClusterStateActionStep(i2currentStepKey, randomStepKey());
         }
-        MockAction mockAction = new MockAction(Collections.singletonList(i2mockStep));
-        Phase i2phase = new Phase("phase", TimeValue.ZERO, Collections.singletonMap("action", mockAction));
-        LifecyclePolicy i2policy = newTestLifecyclePolicy(policy1, Collections.singletonMap(i2phase.getName(), i1phase));
+        MockAction mockAction = new MockAction(List.of(i2mockStep));
+        Phase i2phase = new Phase("phase", TimeValue.ZERO, Map.of("action", mockAction));
+        LifecyclePolicy i2policy = newTestLifecyclePolicy(policy1, Map.of(i2phase.getName(), i1phase));
         Index index2 = new Index(
             randomValueOtherThan(index1.getName(), () -> randomAlphaOfLengthBetween(1, 20)),
             randomAlphaOfLengthBetween(1, 20)
@@ -422,14 +410,8 @@ public void doTestExceptionStillProcessesOtherIndices(boolean useOnMaster) {
         }

         SortedMap<String, LifecyclePolicyMetadata> policyMap = new TreeMap<>();
-        policyMap.put(
-            policy1,
-            new LifecyclePolicyMetadata(i1policy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong())
-        );
-        policyMap.put(
-            policy2,
-            new LifecyclePolicyMetadata(i2policy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong())
-        );
+        policyMap.put(policy1, new LifecyclePolicyMetadata(i1policy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong()));
+        policyMap.put(policy2, new LifecyclePolicyMetadata(i2policy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong()));

         IndexMetadata i1indexMetadata = IndexMetadata.builder(index1.getName())
             .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_NAME, policy1))
@@ -533,14 +515,8 @@ public void testIndicesOnShuttingDownNodesInDangerousStep() {
             SingleNodeShutdownMetadata.Type.REPLACE
         )) {
             ClusterState state = ClusterState.builder(ClusterName.DEFAULT).build();
-            assertThat(
-                IndexLifecycleService.indicesOnShuttingDownNodesInDangerousStep(state, "regular_node"),
-                equalTo(Collections.emptySet())
-            );
-            assertThat(
-                IndexLifecycleService.indicesOnShuttingDownNodesInDangerousStep(state, "shutdown_node"),
-                equalTo(Collections.emptySet())
-            );
+            assertThat(IndexLifecycleService.indicesOnShuttingDownNodesInDangerousStep(state, "regular_node"), equalTo(Set.of()));
+            assertThat(IndexLifecycleService.indicesOnShuttingDownNodesInDangerousStep(state, "shutdown_node"), equalTo(Set.of()));

             IndexMetadata nonDangerousIndex = IndexMetadata.builder("no_danger")
                 .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_NAME, "mypolicy"))
@@ -583,7 +559,7 @@ public void testIndicesOnShuttingDownNodesInDangerousStep() {
             Map<String, IndexMetadata> indices = Map.of("no_danger", nonDangerousIndex, "danger", dangerousIndex);

             Metadata metadata = Metadata.builder()
-                .putCustom(IndexLifecycleMetadata.TYPE, new IndexLifecycleMetadata(Collections.emptyMap(), OperationMode.RUNNING))
+                .putCustom(IndexLifecycleMetadata.TYPE, new IndexLifecycleMetadata(Map.of(), OperationMode.RUNNING))
                 .indices(indices)
                 .persistentSettings(settings(IndexVersion.current()).build())
                 .build();
@@ -612,14 +588,8 @@ public void testIndicesOnShuttingDownNodesInDangerousStep() {
                 .build();

             // No danger yet, because no node is shutting down
-            assertThat(
-                IndexLifecycleService.indicesOnShuttingDownNodesInDangerousStep(state, "regular_node"),
-                equalTo(Collections.emptySet())
-            );
-            assertThat(
-                IndexLifecycleService.indicesOnShuttingDownNodesInDangerousStep(state, "shutdown_node"),
-                equalTo(Collections.emptySet())
-            );
+            assertThat(IndexLifecycleService.indicesOnShuttingDownNodesInDangerousStep(state, "regular_node"), equalTo(Set.of()));
+            assertThat(IndexLifecycleService.indicesOnShuttingDownNodesInDangerousStep(state, "shutdown_node"), equalTo(Set.of()));

             state = ClusterState.builder(state)
                 .metadata(
@@ -627,7 +597,7 @@ public void testIndicesOnShuttingDownNodesInDangerousStep() {
                     .putCustom(
                         NodesShutdownMetadata.TYPE,
                         new NodesShutdownMetadata(
-                            Collections.singletonMap(
+                            Map.of(
                                 "shutdown_node",
                                 SingleNodeShutdownMetadata.builder()
                                     .setNodeId("shutdown_node")
@@ -642,15 +612,12 @@ public void testIndicesOnShuttingDownNodesInDangerousStep() {
                 )
                 .build();

-            assertThat(
-                IndexLifecycleService.indicesOnShuttingDownNodesInDangerousStep(state, "regular_node"),
-                equalTo(Collections.emptySet())
-            );
+            assertThat(IndexLifecycleService.indicesOnShuttingDownNodesInDangerousStep(state, "regular_node"), equalTo(Set.of()));
             // No danger, because this is a "RESTART" type shutdown
             assertThat(
                 "restart type shutdowns are not considered dangerous",
                 IndexLifecycleService.indicesOnShuttingDownNodesInDangerousStep(state, "shutdown_node"),
-                equalTo(Collections.emptySet())
+                equalTo(Set.of())
             );

             final String targetNodeName = type == SingleNodeShutdownMetadata.Type.REPLACE ? randomAlphaOfLengthBetween(10, 20) : null;
@@ -661,7 +628,7 @@ public void testIndicesOnShuttingDownNodesInDangerousStep() {
                     .putCustom(
                         NodesShutdownMetadata.TYPE,
                         new NodesShutdownMetadata(
-                            Collections.singletonMap(
+                            Map.of(
                                 "shutdown_node",
                                 SingleNodeShutdownMetadata.builder()
                                     .setNodeId("shutdown_node")
@@ -679,10 +646,7 @@ public void testIndicesOnShuttingDownNodesInDangerousStep() {
                 .build();

             // The dangerous index should be calculated as being in danger now
-            assertThat(
-                IndexLifecycleService.indicesOnShuttingDownNodesInDangerousStep(state, "shutdown_node"),
-                equalTo(Collections.singleton("danger"))
-            );
+            assertThat(IndexLifecycleService.indicesOnShuttingDownNodesInDangerousStep(state, "shutdown_node"), equalTo(Set.of("danger")));
         }
     }
 }
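Reviewer note: the rewritten assertions above compare against Set.of() and Set.of("danger") where the code previously used Collections.emptySet() and Collections.singleton("danger"). Because Set.equals() is specified in terms of element membership rather than implementation class, the assertions are exactly as strict as before. A tiny sketch:

    import java.util.Collections;
    import java.util.Set;

    class SetEqualityDemo {
        public static void main(String[] args) {
            // Different implementations, same elements => equal.
            System.out.println(Set.of().equals(Collections.emptySet()));                   // true
            System.out.println(Set.of("danger").equals(Collections.singleton("danger"))); // true
        }
    }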
diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleTransitionTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleTransitionTests.java
index 49aa0a65a5704..a1f51f1fae90f 100644
--- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleTransitionTests.java
+++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleTransitionTests.java
@@ -48,7 +48,6 @@
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -80,8 +79,8 @@ public void testMoveClusterStateToNextStep() {
             .stream()
             .findFirst()
             .orElseThrow(() -> new AssertionError("expected next phase to be present"));
-        List<LifecyclePolicyMetadata> policyMetadatas = Collections.singletonList(
-            new LifecyclePolicyMetadata(policy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong())
+        List<LifecyclePolicyMetadata> policyMetadatas = List.of(
+            new LifecyclePolicyMetadata(policy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong())
         );
         Step.StepKey currentStep = new Step.StepKey("current_phase", "current_action", "current_step");
         Step.StepKey nextStep = new Step.StepKey(nextPhase.getName(), "next_action", "next_step");
@@ -128,8 +127,8 @@ public void testMoveClusterStateToNextStepSamePhase() {
             p -> p.getPhases().isEmpty(),
             () -> LifecyclePolicyTests.randomTestLifecyclePolicy("policy")
         );
-        List<LifecyclePolicyMetadata> policyMetadatas = Collections.singletonList(
-            new LifecyclePolicyMetadata(policy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong())
+        List<LifecyclePolicyMetadata> policyMetadatas = List.of(
+            new LifecyclePolicyMetadata(policy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong())
         );
         Step.StepKey currentStep = new Step.StepKey("current_phase", "current_action", "current_step");
         Step.StepKey nextStep = new Step.StepKey("current_phase", "next_action", "next_step");
@@ -179,8 +178,8 @@ public void testMoveClusterStateToNextStepSameAction() {
             p -> p.getPhases().isEmpty(),
             () -> LifecyclePolicyTests.randomTestLifecyclePolicy("policy")
         );
-        List<LifecyclePolicyMetadata> policyMetadatas = Collections.singletonList(
-            new LifecyclePolicyMetadata(policy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong())
+        List<LifecyclePolicyMetadata> policyMetadatas = List.of(
+            new LifecyclePolicyMetadata(policy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong())
         );
         Step.StepKey currentStep = new Step.StepKey("current_phase", "current_action", "current_step");
         Step.StepKey nextStep = new Step.StepKey("current_phase", "current_action", "next_step");
@@ -236,8 +235,8 @@ public void testSuccessfulValidatedMoveClusterStateToNextStep() {
             .stream()
             .findFirst()
             .orElseThrow(() -> new AssertionError("expected next phase to be present"));
-        List<LifecyclePolicyMetadata> policyMetadatas = Collections.singletonList(
-            new LifecyclePolicyMetadata(policy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong())
+        List<LifecyclePolicyMetadata> policyMetadatas = List.of(
+            new LifecyclePolicyMetadata(policy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong())
         );
         Step.StepKey currentStepKey = new Step.StepKey("current_phase", "current_action", "current_step");
         Step.StepKey nextStepKey = new Step.StepKey(nextPhase.getName(), "next_action", "next_step");
@@ -279,7 +278,7 @@ public void testValidatedMoveClusterStateToNextStepWithoutPolicy() {
         lifecycleState.setAction(currentStepKey.action());
         lifecycleState.setStep(currentStepKey.name());

-        ClusterState clusterState = buildClusterState(indexName, indexSettingsBuilder, lifecycleState.build(), Collections.emptyList());
+        ClusterState clusterState = buildClusterState(indexName, indexSettingsBuilder, lifecycleState.build(), List.of());
         Index index = clusterState.metadata().index(indexName).getIndex();
         IllegalArgumentException exception = expectThrows(
             IllegalArgumentException.class,
@@ -303,7 +302,7 @@ public void testValidatedMoveClusterStateToNextStepInvalidNextStep() {
         lifecycleState.setAction(currentStepKey.action());
         lifecycleState.setStep(currentStepKey.name());

-        ClusterState clusterState = buildClusterState(indexName, indexSettingsBuilder, lifecycleState.build(), Collections.emptyList());
+        ClusterState clusterState = buildClusterState(indexName, indexSettingsBuilder, lifecycleState.build(), List.of());
         Index index = clusterState.metadata().index(indexName).getIndex();
         IllegalArgumentException exception = expectThrows(
             IllegalArgumentException.class,
@@ -325,7 +324,7 @@ public void testMoveClusterStateToErrorStep() throws IOException {
         lifecycleState.setPhase(currentStep.phase());
         lifecycleState.setAction(currentStep.action());
         lifecycleState.setStep(currentStep.name());
-        ClusterState clusterState = buildClusterState(indexName, Settings.builder(), lifecycleState.build(), Collections.emptyList());
+        ClusterState clusterState = buildClusterState(indexName, Settings.builder(), lifecycleState.build(), List.of());
         Index index = clusterState.metadata().index(indexName).getIndex();

         ClusterState newClusterState = IndexLifecycleTransition.moveClusterStateToErrorStep(
@@ -359,7 +358,7 @@ public void testAddStepInfoToClusterState() throws IOException {
         lifecycleState.setPhase(currentStep.phase());
         lifecycleState.setAction(currentStep.action());
         lifecycleState.setStep(currentStep.name());
-        ClusterState clusterState = buildClusterState(indexName, Settings.builder(), lifecycleState.build(), Collections.emptyList());
+        ClusterState clusterState = buildClusterState(indexName, Settings.builder(), lifecycleState.build(), List.of());
         Index index = clusterState.metadata().index(indexName).getIndex();
         ClusterState newClusterState = IndexLifecycleTransition.addStepInfoToClusterState(index, clusterState, stepInfo);
         assertClusterStateStepInfo(clusterState, index, currentStep, newClusterState, stepInfo);
@@ -378,9 +377,7 @@ public void testRemovePolicyForIndex() {
         lifecycleState.setAction(currentStep.action());
         lifecycleState.setStep(currentStep.name());
         List<LifecyclePolicyMetadata> policyMetadatas = new ArrayList<>();
-        policyMetadatas.add(
-            new LifecyclePolicyMetadata(oldPolicy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong())
-        );
+        policyMetadatas.add(new LifecyclePolicyMetadata(oldPolicy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong()));
         ClusterState clusterState = buildClusterState(indexName, indexSettingsBuilder, lifecycleState.build(), policyMetadatas);
         Index index = clusterState.metadata().index(indexName).getIndex();
         Index[] indices = new Index[] { index };
@@ -399,7 +396,7 @@ public void testRemovePolicyForIndexNoCurrentPolicy() {
             indexName,
             indexSettingsBuilder,
             LifecycleExecutionState.builder().build(),
-            Collections.emptyList()
+            List.of()
         );
         Index index = clusterState.metadata().index(indexName).getIndex();
         Index[] indices = new Index[] { index };
@@ -414,7 +411,7 @@ public void testRemovePolicyForIndexNoCurrentPolicy() {
     public void testRemovePolicyForIndexIndexDoesntExist() {
         String indexName = randomAlphaOfLength(10);
         String oldPolicyName = "old_policy";
-        LifecyclePolicy oldPolicy = newTestLifecyclePolicy(oldPolicyName, Collections.emptyMap());
+        LifecyclePolicy oldPolicy = newTestLifecyclePolicy(oldPolicyName, Map.of());
         Step.StepKey currentStep = AbstractStepTestCase.randomStepKey();
         Settings.Builder indexSettingsBuilder = Settings.builder().put(LifecycleSettings.LIFECYCLE_NAME, oldPolicyName);
         LifecycleExecutionState.Builder lifecycleState = LifecycleExecutionState.builder();
@@ -422,9 +419,7 @@ public void testRemovePolicyForIndexIndexDoesntExist() {
         lifecycleState.setAction(currentStep.action());
         lifecycleState.setStep(currentStep.name());
         List<LifecyclePolicyMetadata> policyMetadatas = new ArrayList<>();
-        policyMetadatas.add(
-            new LifecyclePolicyMetadata(oldPolicy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong())
-        );
+        policyMetadatas.add(new LifecyclePolicyMetadata(oldPolicy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong()));
         ClusterState clusterState = buildClusterState(indexName, indexSettingsBuilder, lifecycleState.build(), policyMetadatas);
         Index index = new Index("doesnt_exist", "im_not_here");
         Index[] indices = new Index[] { index };
@@ -448,9 +443,7 @@ public void testRemovePolicyForIndexIndexInUnsafe() {
         lifecycleState.setAction(currentStep.action());
         lifecycleState.setStep(currentStep.name());
         List<LifecyclePolicyMetadata> policyMetadatas = new ArrayList<>();
-        policyMetadatas.add(
-            new LifecyclePolicyMetadata(oldPolicy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong())
-        );
+        policyMetadatas.add(new LifecyclePolicyMetadata(oldPolicy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong()));
         ClusterState clusterState = buildClusterState(indexName, indexSettingsBuilder, lifecycleState.build(), policyMetadatas);
         Index index = clusterState.metadata().index(indexName).getIndex();
         Index[] indices = new Index[] { index };
@@ -475,9 +468,7 @@ public void testRemovePolicyWithIndexingComplete() {
         lifecycleState.setAction(currentStep.action());
         lifecycleState.setStep(currentStep.name());
         List<LifecyclePolicyMetadata> policyMetadatas = new ArrayList<>();
-        policyMetadatas.add(
-            new LifecyclePolicyMetadata(oldPolicy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong())
-        );
+        policyMetadatas.add(new LifecyclePolicyMetadata(oldPolicy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong()));
         ClusterState clusterState = buildClusterState(indexName, indexSettingsBuilder, lifecycleState.build(), policyMetadatas);
         Index index = clusterState.metadata().index(indexName).getIndex();
         Index[] indices = new Index[] { index };
@@ -756,7 +747,7 @@ public void testMoveClusterStateToFailedStep() {
         LifecyclePolicy policy = createPolicy(policyName, failedStepKey, null);
         LifecyclePolicyMetadata policyMetadata = new LifecyclePolicyMetadata(
             policy,
-            Collections.emptyMap(),
+            Map.of(),
             randomNonNegativeLong(),
             randomNonNegativeLong()
         );
@@ -771,12 +762,7 @@ public void testMoveClusterStateToFailedStep() {
         lifecycleState.setStep(errorStepKey.name());
         lifecycleState.setStepTime(now);
         lifecycleState.setFailedStep(failedStepKey.name());
-        ClusterState clusterState = buildClusterState(
-            indexName,
-            indexSettingsBuilder,
-            lifecycleState.build(),
-            Collections.singletonList(policyMetadata)
-        );
+        ClusterState clusterState = buildClusterState(indexName, indexSettingsBuilder, lifecycleState.build(), List.of(policyMetadata));
         Index index = clusterState.metadata().index(indexName).getIndex();
         ClusterState nextClusterState = IndexLifecycleTransition.moveClusterStateToPreviouslyFailedStep(
             clusterState,
@@ -802,7 +788,7 @@ public void testMoveClusterStateToFailedStepWithUnknownStep() {
         LifecyclePolicy policy = createPolicy(policyName, failedStepKey, null);
         LifecyclePolicyMetadata policyMetadata = new LifecyclePolicyMetadata(
             policy,
-            Collections.emptyMap(),
+            Map.of(),
             randomNonNegativeLong(),
             randomNonNegativeLong()
         );
@@ -817,12 +803,7 @@ public void testMoveClusterStateToFailedStepWithUnknownStep() {
         lifecycleState.setStep(errorStepKey.name());
         lifecycleState.setStepTime(now);
         lifecycleState.setFailedStep(failedStepKey.name());
-        ClusterState clusterState = buildClusterState(
-            indexName,
-            indexSettingsBuilder,
-            lifecycleState.build(),
-            Collections.singletonList(policyMetadata)
-        );
+        ClusterState clusterState = buildClusterState(indexName, indexSettingsBuilder, lifecycleState.build(), List.of(policyMetadata));
         IllegalArgumentException exception = expectThrows(
             IllegalArgumentException.class,
             () -> IndexLifecycleTransition.moveClusterStateToPreviouslyFailedStep(clusterState, indexName, () -> now, policyRegistry, false)
@@ -840,7 +821,7 @@ public void testMoveClusterStateToFailedStepIndexNotFound() {
             existingIndexName,
             Settings.builder(),
             LifecycleExecutionState.builder().build(),
-            Collections.emptyList()
+            List.of()
         );
         IllegalArgumentException exception = expectThrows(
             IllegalArgumentException.class,
@@ -863,7 +844,7 @@ public void testMoveClusterStateToFailedStepInvalidPolicySetting() {
         lifecycleState.setAction(errorStepKey.action());
         lifecycleState.setStep(errorStepKey.name());
         lifecycleState.setFailedStep(failedStepKey.name());
-        ClusterState clusterState = buildClusterState(indexName, indexSettingsBuilder, lifecycleState.build(), Collections.emptyList());
+        ClusterState clusterState = buildClusterState(indexName, indexSettingsBuilder, lifecycleState.build(), List.of());
         IllegalArgumentException exception = expectThrows(
             IllegalArgumentException.class,
             () -> IndexLifecycleTransition.moveClusterStateToPreviouslyFailedStep(clusterState, indexName, () -> now, policyRegistry, false)
@@ -883,7 +864,7 @@ public void testMoveClusterStateToFailedNotOnError() {
         lifecycleState.setPhase(failedStepKey.phase());
         lifecycleState.setAction(failedStepKey.action());
         lifecycleState.setStep(failedStepKey.name());
-        ClusterState clusterState = buildClusterState(indexName, indexSettingsBuilder, lifecycleState.build(), Collections.emptyList());
+        ClusterState clusterState = buildClusterState(indexName, indexSettingsBuilder, lifecycleState.build(), List.of());
         IllegalArgumentException exception = expectThrows(
             IllegalArgumentException.class,
             () -> IndexLifecycleTransition.moveClusterStateToPreviouslyFailedStep(clusterState, indexName, () -> now, policyRegistry, false)
@@ -906,7 +887,7 @@ public void testMoveClusterStateToPreviouslyFailedStepAsAutomaticRetryAndSetsPre
         LifecyclePolicy policy = createPolicy(policyName, failedStepKey, null);
         LifecyclePolicyMetadata policyMetadata = new LifecyclePolicyMetadata(
             policy,
-            Collections.emptyMap(),
+            Map.of(),
             randomNonNegativeLong(),
             randomNonNegativeLong()
         );
@@ -923,12 +904,7 @@ public void testMoveClusterStateToPreviouslyFailedStepAsAutomaticRetryAndSetsPre
         lifecycleState.setFailedStep(failedStepKey.name());
         String initialStepInfo = randomAlphaOfLengthBetween(10, 50);
         lifecycleState.setStepInfo(initialStepInfo);
-        ClusterState clusterState = buildClusterState(
-            indexName,
-            indexSettingsBuilder,
-            lifecycleState.build(),
-            Collections.singletonList(policyMetadata)
-        );
+        ClusterState clusterState = buildClusterState(indexName, indexSettingsBuilder, lifecycleState.build(), List.of(policyMetadata));
         Index index = clusterState.metadata().index(indexName).getIndex();
         ClusterState nextClusterState = IndexLifecycleTransition.moveClusterStateToPreviouslyFailedStep(
             clusterState,
@@ -976,13 +952,11 @@ public void testMoveToFailedStepDoesntRefreshCachedPhaseWhenUnsafe() {
         Map<String, LifecycleAction> actions = new HashMap<>();
         actions.put("set_priority", new SetPriorityAction(100));
         Phase hotPhase = new Phase("hot", TimeValue.ZERO, actions);
-        Map<String, Phase> phases = Collections.singletonMap("hot", hotPhase);
+        Map<String, Phase> phases = Map.of("hot", hotPhase);
         LifecyclePolicy currentPolicy = new LifecyclePolicy("my-policy", phases);

         List<LifecyclePolicyMetadata> policyMetadatas = new ArrayList<>();
-        policyMetadatas.add(
-            new LifecyclePolicyMetadata(currentPolicy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong())
-        );
+        policyMetadatas.add(new LifecyclePolicyMetadata(currentPolicy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong()));
         Step.StepKey errorStepKey = new Step.StepKey("hot", RolloverAction.NAME, ErrorStep.NAME);
         PolicyStepsRegistry stepsRegistry = createOneStepPolicyStepRegistry("my-policy", new ErrorStep(errorStepKey));
@@ -1040,9 +1014,9 @@ public void testRefreshPhaseJson() throws IOException {
         actions.put("rollover", new RolloverAction(null, null, null, 1L, null, null, null, null, null, null));
         actions.put("set_priority", new SetPriorityAction(100));
         Phase hotPhase = new Phase("hot", TimeValue.ZERO, actions);
-        Map<String, Phase> phases = Collections.singletonMap("hot", hotPhase);
+        Map<String, Phase> phases = Map.of("hot", hotPhase);
         LifecyclePolicy newPolicy = new LifecyclePolicy("my-policy", phases);
-        LifecyclePolicyMetadata policyMetadata = new LifecyclePolicyMetadata(newPolicy, Collections.emptyMap(), 2L, 2L);
+        LifecyclePolicyMetadata policyMetadata = new LifecyclePolicyMetadata(newPolicy, Map.of(), 2L, 2L);

         ClusterState existingState = ClusterState.builder(ClusterState.EMPTY_STATE)
             .metadata(Metadata.builder(Metadata.EMPTY_METADATA).put(meta, false).build())
@@ -1185,7 +1159,7 @@ public void testMoveStateToNextActionAndUpdateCachedPhase() {
         actions.put("rollover", new RolloverAction(null, null, null, 1L, null, null, null, null, null, null));
         actions.put("set_priority", new SetPriorityAction(100));
         Phase hotPhase = new Phase("hot", TimeValue.ZERO, actions);
-        Map<String, Phase> phases = Collections.singletonMap("hot", hotPhase);
+        Map<String, Phase> phases = Map.of("hot", hotPhase);
         LifecyclePolicy currentPolicy = new LifecyclePolicy("my-policy", phases);

         {
@@ -1195,10 +1169,10 @@ public void testMoveStateToNextActionAndUpdateCachedPhase() {
             Map<String, LifecycleAction> actionsWithoutRollover = new HashMap<>();
             actionsWithoutRollover.put("set_priority", new SetPriorityAction(100));
             Phase hotPhaseNoRollover = new Phase("hot", TimeValue.ZERO, actionsWithoutRollover);
-            Map<String, Phase> phasesNoRollover = Collections.singletonMap("hot", hotPhaseNoRollover);
+            Map<String, Phase> phasesNoRollover = Map.of("hot", hotPhaseNoRollover);
             LifecyclePolicyMetadata updatedPolicyMetadata = new LifecyclePolicyMetadata(
                 new LifecyclePolicy("my-policy", phasesNoRollover),
-                Collections.emptyMap(),
+                Map.of(),
                 2L,
                 2L
             );
@@ -1233,10 +1207,10 @@ public void testMoveStateToNextActionAndUpdateCachedPhase() {
             Map<String, LifecycleAction> actionsWitoutSetPriority = new HashMap<>();
             actionsWitoutSetPriority.put("rollover", new RolloverAction(null, null, null, 1L, null, null, null, null, null, null));
             Phase hotPhaseNoSetPriority = new Phase("hot", TimeValue.ZERO, actionsWitoutSetPriority);
-            Map<String, Phase> phasesWithoutSetPriority = Collections.singletonMap("hot", hotPhaseNoSetPriority);
+            Map<String, Phase> phasesWithoutSetPriority = Map.of("hot", hotPhaseNoSetPriority);
             LifecyclePolicyMetadata updatedPolicyMetadata = new LifecyclePolicyMetadata(
                 new LifecyclePolicy("my-policy", phasesWithoutSetPriority),
-                Collections.emptyMap(),
+                Map.of(),
                 2L,
                 2L
             );
@@ -1275,7 +1249,7 @@ private static LifecyclePolicy createPolicy(String policyName, Step.StepKey safe
         assert unsafeStep == null || safeStep.phase().equals(unsafeStep.phase()) == false
             : "safe and unsafe actions must be in different phases";
         Map<String, LifecycleAction> actions = new HashMap<>();
-        List<Step> steps = Collections.singletonList(new MockStep(safeStep, null));
+        List<Step> steps = List.of(new MockStep(safeStep, null));
         MockAction safeAction = new MockAction(steps, true);
         actions.put(safeAction.getWriteableName(), safeAction);
         Phase phase = new Phase(safeStep.phase(), TimeValue.timeValueMillis(0), actions);
@@ -1284,7 +1258,7 @@ private static LifecyclePolicy createPolicy(String policyName, Step.StepKey safe
         if (unsafeStep != null) {
             assert MockAction.NAME.equals(unsafeStep.action()) : "The unsafe action needs to be MockAction.NAME";
             Map<String, LifecycleAction> actions = new HashMap<>();
-            List<Step> steps = Collections.singletonList(new MockStep(unsafeStep, null));
+            List<Step> steps = List.of(new MockStep(unsafeStep, null));
             MockAction unsafeAction = new MockAction(steps, false);
             actions.put(unsafeAction.getWriteableName(), unsafeAction);
             Phase phase = new Phase(unsafeStep.phase(), TimeValue.timeValueMillis(0), actions);
diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/MoveToErrorStepUpdateTaskTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/MoveToErrorStepUpdateTaskTests.java
index eee3fe3ce53c2..81688ec1503cd 100644
--- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/MoveToErrorStepUpdateTaskTests.java
+++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/MoveToErrorStepUpdateTaskTests.java
@@ -28,7 +28,7 @@
 import org.elasticsearch.xpack.core.ilm.Step.StepKey;
 import org.junit.Before;

-import java.util.Collections;
+import java.util.Map;

 import static org.elasticsearch.cluster.metadata.LifecycleExecutionState.ILM_CUSTOM_METADATA_KEY;
 import static org.hamcrest.Matchers.containsString;
@@ -53,10 +53,7 @@ public void setupClusterState() {
             .build();
         index = indexMetadata.getIndex();
         IndexLifecycleMetadata ilmMeta = new IndexLifecycleMetadata(
-            Collections.singletonMap(
-                policy,
-                new LifecyclePolicyMetadata(lifecyclePolicy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong())
-            ),
+            Map.of(policy, new LifecyclePolicyMetadata(lifecyclePolicy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong())),
             OperationMode.RUNNING
         );
         Metadata metadata = Metadata.builder()
diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/MoveToNextStepUpdateTaskTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/MoveToNextStepUpdateTaskTests.java
index f9a8d4a2ab486..554e9a48c625e 100644
--- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/MoveToNextStepUpdateTaskTests.java
+++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/MoveToNextStepUpdateTaskTests.java
@@ -29,7 +29,6 @@
 import org.junit.Before;

 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
@@ -67,10 +66,7 @@ public void setupClusterState() {
         index = indexMetadata.getIndex();
         lifecyclePolicy = LifecyclePolicyTests.randomTestLifecyclePolicy(policy);
         IndexLifecycleMetadata ilmMeta = new IndexLifecycleMetadata(
-            Collections.singletonMap(
-                policy,
-                new LifecyclePolicyMetadata(lifecyclePolicy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong())
-            ),
+            Map.of(policy, new LifecyclePolicyMetadata(lifecyclePolicy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong())),
             OperationMode.RUNNING
         );
         Metadata metadata = Metadata.builder()
@@ -95,7 +91,7 @@ public void testExecuteSuccessfullyMoved() throws Exception {
         AlwaysExistingStepRegistry stepRegistry = new AlwaysExistingStepRegistry(client);
         stepRegistry.update(
             new IndexLifecycleMetadata(
-                Map.of(policy, new LifecyclePolicyMetadata(lifecyclePolicy, Collections.emptyMap(), 2L, 2L)),
+                Map.of(policy, new LifecyclePolicyMetadata(lifecyclePolicy, Map.of(), 2L, 2L)),
                 OperationMode.RUNNING
             )
         );
@@ -169,7 +165,7 @@ public void testExecuteSuccessfulMoveWithInvalidNextStep() throws Exception {
         AlwaysExistingStepRegistry stepRegistry = new AlwaysExistingStepRegistry(client);
         stepRegistry.update(
             new IndexLifecycleMetadata(
-                Map.of(policy, new LifecyclePolicyMetadata(lifecyclePolicy, Collections.emptyMap(), 2L, 2L)),
+                Map.of(policy, new LifecyclePolicyMetadata(lifecyclePolicy, Map.of(), 2L, 2L)),
                 OperationMode.RUNNING
             )
         );
diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/PolicyStepsRegistryTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/PolicyStepsRegistryTests.java
index 36d537a57382c..f61267d40a513 100644
--- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/PolicyStepsRegistryTests.java
+++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/PolicyStepsRegistryTests.java
@@ -46,7 +46,6 @@
 import org.elasticsearch.xpack.core.ilm.Step;
 import org.mockito.Mockito;

-import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -76,7 +75,7 @@ private IndexMetadata emptyMetadata(Index index) {
     public void testGetFirstStep() {
         String policyName = randomAlphaOfLengthBetween(2, 10);
         Step expectedFirstStep = new MockStep(MOCK_STEP_KEY, null);
-        Map<String, Step> firstStepMap = Collections.singletonMap(policyName, expectedFirstStep);
+        Map<String, Step> firstStepMap = Map.of(policyName, expectedFirstStep);
         PolicyStepsRegistry registry = new PolicyStepsRegistry(null, firstStepMap, null, NamedXContentRegistry.EMPTY, null, null);
         Step actualFirstStep = registry.getFirstStep(policyName);
         assertThat(actualFirstStep, sameInstance(expectedFirstStep));
@@ -85,7 +84,7 @@ public void testGetFirstStepUnknownPolicy() {
         String policyName = randomAlphaOfLengthBetween(2, 10);
         Step expectedFirstStep = new MockStep(MOCK_STEP_KEY, null);
-        Map<String, Step> firstStepMap = Collections.singletonMap(policyName, expectedFirstStep);
+        Map<String, Step> firstStepMap = Map.of(policyName, expectedFirstStep);
         PolicyStepsRegistry registry = new PolicyStepsRegistry(null, firstStepMap, null, NamedXContentRegistry.EMPTY, null, null);
         Step actualFirstStep = registry.getFirstStep(policyName + "unknown");
         assertNull(actualFirstStep);
@@ -95,7 +94,7 @@ public void testGetStep() {
         Client client = mock(Client.class);
         Mockito.when(client.settings()).thenReturn(Settings.EMPTY);
         LifecyclePolicy policy = LifecyclePolicyTests.randomTimeseriesLifecyclePolicyWithAllPhases("policy");
-        LifecyclePolicyMetadata policyMetadata = new LifecyclePolicyMetadata(policy, Collections.emptyMap(), 1, randomNonNegativeLong());
+        LifecyclePolicyMetadata policyMetadata = new LifecyclePolicyMetadata(policy, Map.of(), 1, randomNonNegativeLong());
         String phaseName = randomFrom(policy.getPhases().keySet());
         Phase phase = policy.getPhases().get(phaseName);
         PhaseExecutionInfo pei = new PhaseExecutionInfo(policy.getName(), phase, 1, randomNonNegativeLong());
@@ -119,7 +118,7 @@ public void testGetStepErrorStep() {
         Step.StepKey errorStepKey = new Step.StepKey(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10), ErrorStep.NAME);
         Step expectedStep = new ErrorStep(errorStepKey);
         Index index = new Index("test", "uuid");
-        Map<Index, List<Step>> indexSteps = Collections.singletonMap(index, Collections.singletonList(expectedStep));
+        Map<Index, List<Step>> indexSteps = Map.of(index, List.of(expectedStep));
         PolicyStepsRegistry registry = new PolicyStepsRegistry(null, null, null, NamedXContentRegistry.EMPTY, null, null);
         Step actualStep = registry.getStep(emptyMetadata(index), errorStepKey);
         assertThat(actualStep, equalTo(expectedStep));
@@ -143,7 +142,7 @@ public void testGetStepForIndexWithNoPhaseGetsInitializationStep() {
         Client client = mock(Client.class);
         Mockito.when(client.settings()).thenReturn(Settings.EMPTY);
         LifecyclePolicy policy = LifecyclePolicyTests.randomTimeseriesLifecyclePolicy("policy");
-        LifecyclePolicyMetadata policyMetadata = new LifecyclePolicyMetadata(policy, Collections.emptyMap(), 1, randomNonNegativeLong());
+        LifecyclePolicyMetadata policyMetadata = new LifecyclePolicyMetadata(policy, Map.of(), 1, randomNonNegativeLong());
         IndexMetadata indexMetadata = IndexMetadata.builder("test")
             .settings(indexSettings(IndexVersion.current(), 1, 0).put(LifecycleSettings.LIFECYCLE_NAME, "policy").build())
             .build();
@@ -158,7 +157,7 @@ public void testGetStepUnknownStepKey() {
         Client client = mock(Client.class);
         Mockito.when(client.settings()).thenReturn(Settings.EMPTY);
         LifecyclePolicy policy = LifecyclePolicyTests.randomTimeseriesLifecyclePolicyWithAllPhases("policy");
-        LifecyclePolicyMetadata policyMetadata = new LifecyclePolicyMetadata(policy, Collections.emptyMap(), 1, randomNonNegativeLong());
+        LifecyclePolicyMetadata policyMetadata = new LifecyclePolicyMetadata(policy, Map.of(), 1, randomNonNegativeLong());
         String phaseName = randomFrom(policy.getPhases().keySet());
         Phase phase = policy.getPhases().get(phaseName);
         PhaseExecutionInfo pei = new PhaseExecutionInfo(policy.getName(), phase, 1, randomNonNegativeLong());
@@ -193,7 +192,7 @@ public void testUpdateFromNothingToSomethingToNothing() throws Exception {
             headers.put(randomAlphaOfLength(10), randomAlphaOfLength(10));
             headers.put(randomAlphaOfLength(10), randomAlphaOfLength(10));
         }
-        Map<String, LifecyclePolicyMetadata> policyMap = Collections.singletonMap(
+        Map<String, LifecyclePolicyMetadata> policyMap = Map.of(
             newPolicy.getName(),
             new LifecyclePolicyMetadata(newPolicy, headers, randomNonNegativeLong(), randomNonNegativeLong())
         );
@@ -271,7 +270,7 @@ public void testUpdateFromNothingToSomethingToNothing() throws Exception {
         assertThat(registry.getStepMap(), equalTo(registryStepMap));

         // remove policy
-        lifecycleMetadata = new IndexLifecycleMetadata(Collections.emptyMap(), OperationMode.RUNNING);
+        lifecycleMetadata = new IndexLifecycleMetadata(Map.of(), OperationMode.RUNNING);
         currentState = ClusterState.builder(currentState)
             .metadata(Metadata.builder(metadata).putCustom(IndexLifecycleMetadata.TYPE, lifecycleMetadata))
             .build();
@@ -291,7 +290,7 @@ public void testUpdateChangedPolicy() {
             headers.put(randomAlphaOfLength(10), randomAlphaOfLength(10));
             headers.put(randomAlphaOfLength(10), randomAlphaOfLength(10));
         }
-        Map<String, LifecyclePolicyMetadata> policyMap = Collections.singletonMap(
+        Map<String, LifecyclePolicyMetadata> policyMap = Map.of(
             newPolicy.getName(),
             new LifecyclePolicyMetadata(newPolicy, headers, randomNonNegativeLong(), randomNonNegativeLong())
         );
@@ -316,10 +315,7 @@ public void testUpdateChangedPolicy() {
         // swap out policy
         newPolicy = LifecyclePolicyTests.randomTestLifecyclePolicy(policyName);
         lifecycleMetadata = new IndexLifecycleMetadata(
-            Collections.singletonMap(
-                policyName,
-                new LifecyclePolicyMetadata(newPolicy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong())
-            ),
+            Map.of(policyName, new LifecyclePolicyMetadata(newPolicy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong())),
             OperationMode.RUNNING
         );
         currentState = ClusterState.builder(currentState)
@@ -356,7 +352,7 @@ public void testUpdatePolicyButNoPhaseChangeIndexStepsDontChange() throws Except
             headers.put(randomAlphaOfLength(10), randomAlphaOfLength(10));
             headers.put(randomAlphaOfLength(10), randomAlphaOfLength(10));
         }
-        Map<String, LifecyclePolicyMetadata> policyMap = Collections.singletonMap(
+        Map<String, LifecyclePolicyMetadata> policyMap = Map.of(
             newPolicy.getName(),
             new LifecyclePolicyMetadata(newPolicy, headers, randomNonNegativeLong(), randomNonNegativeLong())
         );
@@ -411,7 +407,7 @@ public void testUpdatePolicyButNoPhaseChangeIndexStepsDontChange() throws Except
         assertThat(((ShrinkStep) gotStep).getNumberOfShards(), equalTo(1));

         // Update the policy with the new policy, but keep the phase the same
-        policyMap = Collections.singletonMap(
+        policyMap = Map.of(
             updatedPolicy.getName(),
             new LifecyclePolicyMetadata(updatedPolicy, headers, randomNonNegativeLong(), randomNonNegativeLong())
         );
@@ -457,7 +453,7 @@ public void testGetStepMultithreaded() throws Exception {
             .build();

         SortedMap<String, LifecyclePolicyMetadata> metas = new TreeMap<>();
-        metas.put("policy", new LifecyclePolicyMetadata(policy, Collections.emptyMap(), 1, randomNonNegativeLong()));
+        metas.put("policy", new LifecyclePolicyMetadata(policy, Map.of(), 1, randomNonNegativeLong()));
         IndexLifecycleMetadata meta = new IndexLifecycleMetadata(metas, OperationMode.RUNNING);

         PolicyStepsRegistry registry = new PolicyStepsRegistry(REGISTRY, client, null);
diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/StagnatingIndicesFinderTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/StagnatingIndicesFinderTests.java
index be2d449353242..95412f92b6156 100644
--- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/StagnatingIndicesFinderTests.java
+++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/StagnatingIndicesFinderTests.java
@@ -28,7 +28,6 @@
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.function.LongSupplier;
-import java.util.stream.Collectors;
 import java.util.stream.IntStream;
 import java.util.stream.Stream;

@@ -61,7 +60,7 @@ public void testStagnatingIndicesFinder() {
             assertEquals(expectedMaxTimeOnStep, maxTimeOnStep);
             assertEquals(expectedMaxRetriesPerStep, maxRetriesPerStep);
             return rc;
-        }).collect(Collectors.toList());
+        }).toList();

         // Per the evaluator, the timeSupplier _must_ be called only twice
         when(mockedTimeSupplier.getAsLong()).thenReturn(instant, instant);
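Reviewer note: the hunk above swaps `.collect(Collectors.toList())` for `.toList()`, which also lets the Collectors import go. One subtlety worth knowing when reviewing this kind of cleanup: Stream.toList() (Java 16+) returns an unmodifiable list, whereas Collectors.toList() in current JDKs happens to return a mutable ArrayList. That only matters if the caller mutates the result, which this test does not:

    import java.util.List;
    import java.util.stream.Collectors;
    import java.util.stream.Stream;

    class ToListDemo {
        public static void main(String[] args) {
            List<Integer> mutable = Stream.of(1, 2, 3).collect(Collectors.toList());
            mutable.add(4); // fine: backed by ArrayList in practice

            List<Integer> unmodifiable = Stream.of(1, 2, 3).toList();
            try {
                unmodifiable.add(4); // throws UnsupportedOperationException
            } catch (UnsupportedOperationException expected) {
                System.out.println("Stream.toList() is unmodifiable");
            }
        }
    }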
diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/action/TransportStopILMActionTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/action/TransportStopILMActionTests.java
index 8c0fede4c11dc..bd0d63ebb0f3d 100644
--- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/action/TransportStopILMActionTests.java
+++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/action/TransportStopILMActionTests.java
@@ -24,7 +24,8 @@
 import org.elasticsearch.xpack.core.ilm.action.ILMActions;
 import org.mockito.ArgumentMatcher;

-import static java.util.Collections.emptyMap;
+import java.util.Map;
+
 import static org.mockito.ArgumentMatchers.argThat;
 import static org.mockito.ArgumentMatchers.eq;
 import static org.mockito.Mockito.mock;
@@ -50,7 +51,7 @@ public void testStopILMClusterStatePriorityIsImmediate() {
             ILMActions.STOP.name(),
             "description",
             new TaskId(randomLong() + ":" + randomLong()),
-            emptyMap()
+            Map.of()
         );
         StopILMRequest request = new StopILMRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT);
         transportStopILMAction.masterOperation(task, request, ClusterState.EMPTY_STATE, ActionListener.noop());
diff --git a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/InferenceUpgradeTestCase.java b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/InferenceUpgradeTestCase.java
index 58335eb53b366..d38503a884092 100644
--- a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/InferenceUpgradeTestCase.java
+++ b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/InferenceUpgradeTestCase.java
@@ -19,7 +19,6 @@
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
-import java.util.Objects;

 import static org.elasticsearch.core.Strings.format;

@@ -112,13 +111,10 @@ protected void put(String inferenceId, String modelConfig, TaskType taskType) th
     @SuppressWarnings("unchecked")
     // in version 8.15, there was a breaking change where "models" was renamed to "endpoints"
     LinkedList<Map<String, Object>> getConfigsWithBreakingChangeHandling(TaskType testTaskType, String oldClusterId) throws IOException {
-
+        var response = get(testTaskType, oldClusterId);
         LinkedList<Map<String, Object>> configs;
-        configs = new LinkedList<>(
-            (List<Map<String, Object>>) Objects.requireNonNullElse((get(testTaskType, oldClusterId).get("endpoints")), List.of())
-        );
-        configs.addAll(Objects.requireNonNullElse((List<Map<String, Object>>) get(testTaskType, oldClusterId).get("models"), List.of()));
-
+        configs = new LinkedList<>((List<Map<String, Object>>) response.getOrDefault("endpoints", List.of()));
+        configs.addAll((List<Map<String, Object>>) response.getOrDefault("models", List.of()));
         return configs;
     }
 }
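Reviewer note: besides dropping the Objects import, the rewrite above fixes a real inefficiency, since the old code issued the GET request twice. Note that swapping Objects.requireNonNullElse(map.get(k), def) for map.getOrDefault(k, def) is only equivalent when an absent key is the sole source of null: getOrDefault falls back to the default when the key is missing, but returns null if the key is present and explicitly mapped to null.

    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;
    import java.util.Objects;

    class GetOrDefaultDemo {
        public static void main(String[] args) {
            Map<String, List<String>> response = new HashMap<>();
            response.put("models", null); // key present, value null

            // getOrDefault only falls back when the key is absent:
            System.out.println(response.getOrDefault("models", List.of()));    // null
            System.out.println(response.getOrDefault("endpoints", List.of())); // []

            // requireNonNullElse also covers a present-but-null mapping:
            System.out.println(Objects.requireNonNullElse(response.get("models"), List.of())); // []
        }
    }

If the REST response could ever carry an explicit `"models": null`, old and new code would behave differently; the change assumes the key is simply absent on newer clusters.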
diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java
index 3b0fc869c8124..c7b3a9d42f579 100644
--- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java
+++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java
@@ -22,6 +22,7 @@
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.test.ESIntegTestCase;
+import org.elasticsearch.xpack.inference.LocalStateInferencePlugin;
 import org.elasticsearch.xpack.inference.Utils;
 import org.elasticsearch.xpack.inference.mock.TestDenseInferenceServiceExtension;
 import org.elasticsearch.xpack.inference.mock.TestSparseInferenceServiceExtension;
@@ -58,7 +59,7 @@ public void setup() throws Exception {

     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
-        return Arrays.asList(Utils.TestInferencePlugin.class);
+        return Arrays.asList(LocalStateInferencePlugin.class);
     }

     public void testBulkOperations() throws Exception {
diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java
index be6b3725b0f35..d5c156d1d4f46 100644
--- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java
+++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java
@@ -31,7 +31,7 @@
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.xcontent.ToXContentObject;
 import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xpack.inference.InferencePlugin;
+import org.elasticsearch.xpack.inference.LocalStateInferencePlugin;
 import org.elasticsearch.xpack.inference.chunking.ChunkingSettingsTests;
 import org.elasticsearch.xpack.inference.registry.ModelRegistry;
 import org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalModel;
@@ -76,7 +76,7 @@ public void createComponents() {

     @Override
     protected Collection<Class<? extends Plugin>> getPlugins() {
-        return pluginList(ReindexPlugin.class, InferencePlugin.class);
+        return pluginList(ReindexPlugin.class, LocalStateInferencePlugin.class);
     }

     public void testStoreModel() throws Exception {
diff --git a/x-pack/plugin/inference/src/main/java/module-info.java b/x-pack/plugin/inference/src/main/java/module-info.java
index 53974657e4e23..1c2240e8c5217 100644
--- a/x-pack/plugin/inference/src/main/java/module-info.java
+++ b/x-pack/plugin/inference/src/main/java/module-info.java
@@ -34,6 +34,7 @@
     requires software.amazon.awssdk.retries.api;
     requires org.reactivestreams;
    requires org.elasticsearch.logging;
+    requires org.elasticsearch.sslconfig;

     exports org.elasticsearch.xpack.inference.action;
     exports org.elasticsearch.xpack.inference.registry;
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java
index 67892dfe78624..3b7613b8b0e1f 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java
@@ -10,6 +10,7 @@
 import org.elasticsearch.features.FeatureSpecification;
 import org.elasticsearch.features.NodeFeature;
 import org.elasticsearch.xpack.inference.mapper.SemanticTextFieldMapper;
+import org.elasticsearch.xpack.inference.queries.SemanticMatchQueryRewriteInterceptor;
 import org.elasticsearch.xpack.inference.queries.SemanticQueryBuilder;
 import org.elasticsearch.xpack.inference.rank.random.RandomRankRetrieverBuilder;
 import org.elasticsearch.xpack.inference.rank.textsimilarity.TextSimilarityRankRetrieverBuilder;
@@ -43,7 +44,8 @@ public Set<NodeFeature> getTestFeatures() {
             SemanticTextFieldMapper.SEMANTIC_TEXT_DELETE_FIX,
             SemanticTextFieldMapper.SEMANTIC_TEXT_ZERO_SIZE_FIX,
             SemanticTextFieldMapper.SEMANTIC_TEXT_ALWAYS_EMIT_INFERENCE_ID_FIX,
-            SEMANTIC_TEXT_HIGHLIGHTER
+            SEMANTIC_TEXT_HIGHLIGHTER,
+            SemanticMatchQueryRewriteInterceptor.SEMANTIC_MATCH_QUERY_REWRITE_INTERCEPTION_SUPPORTED
         );
     }
 }
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java
index 148a784456361..93743a5485c2c 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java
@@ -28,6 +28,7 @@
 import org.elasticsearch.indices.SystemIndexDescriptor;
 import org.elasticsearch.inference.InferenceServiceExtension;
 import org.elasticsearch.inference.InferenceServiceRegistry;
+import org.elasticsearch.license.XPackLicenseState;
 import org.elasticsearch.node.PluginComponentBinding;
 import org.elasticsearch.plugins.ActionPlugin;
 import org.elasticsearch.plugins.ExtensiblePlugin;
@@ -35,6 +36,7 @@
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.plugins.SearchPlugin;
 import org.elasticsearch.plugins.SystemIndexPlugin;
+import org.elasticsearch.plugins.internal.rewriter.QueryRewriteInterceptor;
 import org.elasticsearch.rest.RestController;
 import org.elasticsearch.rest.RestHandler;
 import org.elasticsearch.search.fetch.subphase.highlight.Highlighter;
@@ -44,6 +46,7 @@
 import org.elasticsearch.threadpool.ScalingExecutorBuilder;
 import org.elasticsearch.xcontent.ParseField;
 import org.elasticsearch.xpack.core.ClientHelper;
+import org.elasticsearch.xpack.core.XPackPlugin;
 import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction;
 import org.elasticsearch.xpack.core.inference.action.DeleteInferenceEndpointAction;
 import org.elasticsearch.xpack.core.inference.action.GetInferenceDiagnosticsAction;
@@ -53,6 +56,7 @@
 import org.elasticsearch.xpack.core.inference.action.PutInferenceModelAction;
 import org.elasticsearch.xpack.core.inference.action.UnifiedCompletionAction;
 import org.elasticsearch.xpack.core.inference.action.UpdateInferenceModelAction;
+import org.elasticsearch.xpack.core.ssl.SSLService;
 import org.elasticsearch.xpack.inference.action.TransportDeleteInferenceEndpointAction;
 import org.elasticsearch.xpack.inference.action.TransportGetInferenceDiagnosticsAction;
 import org.elasticsearch.xpack.inference.action.TransportGetInferenceModelAction;
@@ -74,6 +78,7 @@
 import org.elasticsearch.xpack.inference.logging.ThrottlerManager;
 import org.elasticsearch.xpack.inference.mapper.OffsetSourceFieldMapper;
 import org.elasticsearch.xpack.inference.mapper.SemanticTextFieldMapper;
+import org.elasticsearch.xpack.inference.queries.SemanticMatchQueryRewriteInterceptor;
 import org.elasticsearch.xpack.inference.queries.SemanticQueryBuilder;
 import org.elasticsearch.xpack.inference.rank.random.RandomRankBuilder;
 import org.elasticsearch.xpack.inference.rank.random.RandomRankRetrieverBuilder;
@@ -116,7 +121,6 @@
 import java.util.Map;
 import java.util.function.Predicate;
 import java.util.function.Supplier;
-import java.util.stream.Collectors;
 import java.util.stream.Stream;

 import static java.util.Collections.singletonList;
@@ -150,6 +154,7 @@ public class InferencePlugin extends Plugin implements ActionPlugin, ExtensibleP
     private final Settings settings;
     private final SetOnce<HttpRequestSender.Factory> httpFactory = new SetOnce<>();
     private final SetOnce<AmazonBedrockRequestSender.Factory> amazonBedrockFactory = new SetOnce<>();
+    private final SetOnce<HttpRequestSender.Factory> elasicInferenceServiceFactory = new SetOnce<>();
     private final SetOnce<ServiceComponents> serviceComponents = new SetOnce<>();
     private final SetOnce<ElasticInferenceServiceComponents> elasticInferenceServiceComponents = new SetOnce<>();
     private final SetOnce<InferenceServiceRegistry> inferenceServiceRegistry = new SetOnce<>();
@@ -232,31 +237,31 @@ public Collection<?> createComponents(PluginServices services) {
         var inferenceServices = new ArrayList<>(inferenceServiceExtensions);
         inferenceServices.add(this::getInferenceServiceFactories);

-        // Set elasticInferenceUrl based on feature flags to support transitioning to the new Elastic Inference Service URL without exposing
-        // internal names like "eis" or "gateway".
-        ElasticInferenceServiceSettings inferenceServiceSettings = new ElasticInferenceServiceSettings(settings);
-
-        String elasticInferenceUrl = null;
+        if (isElasticInferenceServiceEnabled()) {
+            // Create a separate instance of HTTPClientManager with its own SSL configuration (`xpack.inference.elastic.http.ssl.*`).
+            var elasticInferenceServiceHttpClientManager = HttpClientManager.create(
+                settings,
+                services.threadPool(),
+                services.clusterService(),
+                throttlerManager,
+                getSslService()
+            );

-        if (ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) {
-            elasticInferenceUrl = inferenceServiceSettings.getElasticInferenceServiceUrl();
-        } else if (DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) {
-            log.warn(
-                "Deprecated flag {} detected for enabling {}. Please use {}.",
-                ELASTIC_INFERENCE_SERVICE_IDENTIFIER,
-                DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG,
-                ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG
+            var elasticInferenceServiceRequestSenderFactory = new HttpRequestSender.Factory(
+                serviceComponents.get(),
+                elasticInferenceServiceHttpClientManager,
+                services.clusterService()
             );
-            elasticInferenceUrl = inferenceServiceSettings.getEisGatewayUrl();
-        }
+            elasicInferenceServiceFactory.set(elasticInferenceServiceRequestSenderFactory);

-        if (elasticInferenceUrl != null) {
+            ElasticInferenceServiceSettings inferenceServiceSettings = new ElasticInferenceServiceSettings(settings);
+            String elasticInferenceUrl = this.getElasticInferenceServiceUrl(inferenceServiceSettings);
             elasticInferenceServiceComponents.set(new ElasticInferenceServiceComponents(elasticInferenceUrl));

             inferenceServices.add(
                 () -> List.of(
                     context -> new ElasticInferenceService(
-                        httpFactory.get(),
+                        elasicInferenceServiceFactory.get(),
                         serviceComponents.get(),
                         elasticInferenceServiceComponents.get()
                     )
@@ -379,16 +384,21 @@ public static ExecutorBuilder<?> inferenceUtilityExecutor(Settings settings) {

     @Override
     public List<Setting<?>> getSettings() {
-        return Stream.of(
-            HttpSettings.getSettingsDefinitions(),
-            HttpClientManager.getSettingsDefinitions(),
-            ThrottlerManager.getSettingsDefinitions(),
-            RetrySettings.getSettingsDefinitions(),
-            ElasticInferenceServiceSettings.getSettingsDefinitions(),
-            Truncator.getSettingsDefinitions(),
-            RequestExecutorServiceSettings.getSettingsDefinitions(),
-            List.of(SKIP_VALIDATE_AND_START)
-        ).flatMap(Collection::stream).collect(Collectors.toList());
+        ArrayList<Setting<?>> settings = new ArrayList<>();
+        settings.addAll(HttpSettings.getSettingsDefinitions());
+        settings.addAll(HttpClientManager.getSettingsDefinitions());
+        settings.addAll(ThrottlerManager.getSettingsDefinitions());
+        settings.addAll(RetrySettings.getSettingsDefinitions());
+        settings.addAll(Truncator.getSettingsDefinitions());
+        settings.addAll(RequestExecutorServiceSettings.getSettingsDefinitions());
+        settings.add(SKIP_VALIDATE_AND_START);
+
+        // Register Elastic Inference Service settings definitions if the corresponding feature flag is enabled.
+        if (isElasticInferenceServiceEnabled()) {
+            settings.addAll(ElasticInferenceServiceSettings.getSettingsDefinitions());
+        }
+
+        return settings;
     }

@@ -428,10 +438,18 @@ public List<QuerySpec<?>> getQueries() {
         return List.of(new QuerySpec<>(SemanticQueryBuilder.NAME, SemanticQueryBuilder::new, SemanticQueryBuilder::fromXContent));
     }

+    @Override
+    public List<QueryRewriteInterceptor> getQueryRewriteInterceptors() {
+        return List.of(new SemanticMatchQueryRewriteInterceptor());
+    }
+
     @Override
     public List<RetrieverSpec<?>> getRetrievers() {
         return List.of(
-            new RetrieverSpec<>(new ParseField(TextSimilarityRankBuilder.NAME), TextSimilarityRankRetrieverBuilder::fromXContent),
+            new RetrieverSpec<>(
+                new ParseField(TextSimilarityRankBuilder.NAME),
+                (parser, context) -> TextSimilarityRankRetrieverBuilder.fromXContent(parser, context, getLicenseState())
+            ),
             new RetrieverSpec<>(new ParseField(RandomRankBuilder.NAME), RandomRankRetrieverBuilder::fromXContent)
         );
     }
@@ -440,4 +458,36 @@ public List<RetrieverSpec<?>> getRetrievers() {
     public Map<String, Highlighter> getHighlighters() {
         return Map.of(SemanticTextHighlighter.NAME, new SemanticTextHighlighter());
     }
+
+    // Get Elastic Inference service URL based on feature flags to support transitioning
+    // to the new Elastic Inference Service URL.
+ private String getElasticInferenceServiceUrl(ElasticInferenceServiceSettings settings) { + String elasticInferenceUrl = null; + + if (ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { + elasticInferenceUrl = settings.getElasticInferenceServiceUrl(); + } else if (DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { + log.warn( + "Deprecated flag {} detected for enabling {}. Please use {}.", + ELASTIC_INFERENCE_SERVICE_IDENTIFIER, + DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG, + ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG + ); + elasticInferenceUrl = settings.getEisGatewayUrl(); + } + + return elasticInferenceUrl; + } + + protected Boolean isElasticInferenceServiceEnabled() { + return (ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() || DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()); + } + + protected SSLService getSslService() { + return XPackPlugin.getSharedSslService(); + } + + protected XPackLicenseState getLicenseState() { + return XPackPlugin.getSharedLicenseState(); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClientManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClientManager.java index e5d76b9bb5570..6d09c9e67b363 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClientManager.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClientManager.java @@ -7,9 +7,14 @@ package org.elasticsearch.xpack.inference.external.http; +import org.apache.http.config.Registry; +import org.apache.http.config.RegistryBuilder; import org.apache.http.impl.nio.conn.PoolingNHttpClientConnectionManager; import org.apache.http.impl.nio.reactor.DefaultConnectingIOReactor; import org.apache.http.impl.nio.reactor.IOReactorConfig; +import org.apache.http.nio.conn.NoopIOSessionStrategy; +import org.apache.http.nio.conn.SchemeIOSessionStrategy; +import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy; import org.apache.http.nio.reactor.ConnectingIOReactor; import org.apache.http.nio.reactor.IOReactorException; import org.apache.http.pool.PoolStats; @@ -21,6 +26,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.ssl.SSLService; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import java.io.Closeable; @@ -28,11 +34,13 @@ import java.util.List; import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceSettings.ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX; public class HttpClientManager implements Closeable { private static final Logger logger = LogManager.getLogger(HttpClientManager.class); /** * The maximum number of total connections the connection pool can lease to all routes. + * The configuration applies to each instance of HTTPClientManager (max_total_connections=10 and instances=5 leads to 50 connections). * From googling around the connection pools maxTotal value should be close to the number of available threads. * * https://stackoverflow.com/questions/30989637/how-to-decide-optimal-settings-for-setmaxtotal-and-setdefaultmaxperroute @@ -47,6 +55,7 @@ public class HttpClientManager implements Closeable { /** * The max number of connections a single route can lease. 
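+ * (In Apache HttpClient a route essentially corresponds to one target host, so this caps per-host concurrency.)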
+ * This configuration applies to each instance of HttpClientManager. */ public static final Setting MAX_ROUTE_CONNECTIONS = Setting.intSetting( "xpack.inference.http.max_route_connections", @@ -98,6 +107,22 @@ public static HttpClientManager create( return new HttpClientManager(settings, connectionManager, threadPool, clusterService, throttlerManager); } + public static HttpClientManager create( + Settings settings, + ThreadPool threadPool, + ClusterService clusterService, + ThrottlerManager throttlerManager, + SSLService sslService + ) { + // Set the sslStrategy to ensure an encrypted connection, as Elastic Inference Service requires it. + SSLIOSessionStrategy sslioSessionStrategy = sslService.sslIOSessionStrategy( + sslService.getSSLConfiguration(ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX) + ); + + PoolingNHttpClientConnectionManager connectionManager = createConnectionManager(sslioSessionStrategy); + return new HttpClientManager(settings, connectionManager, threadPool, clusterService, throttlerManager); + } + // Default for testing HttpClientManager( Settings settings, @@ -121,6 +146,25 @@ public static HttpClientManager create( this.addSettingsUpdateConsumers(clusterService); } + private static PoolingNHttpClientConnectionManager createConnectionManager(SSLIOSessionStrategy sslStrategy) { + ConnectingIOReactor ioReactor; + try { + var configBuilder = IOReactorConfig.custom().setSoKeepAlive(true); + ioReactor = new DefaultConnectingIOReactor(configBuilder.build()); + } catch (IOReactorException e) { + var message = "Failed to initialize HTTP client manager with SSL."; + logger.error(message, e); + throw new ElasticsearchException(message, e); + } + + Registry registry = RegistryBuilder.create() + .register("http", NoopIOSessionStrategy.INSTANCE) + .register("https", sslStrategy) + .build(); + + return new PoolingNHttpClientConnectionManager(ioReactor, registry); + } + private static PoolingNHttpClientConnectionManager createConnectionManager() { ConnectingIOReactor ioReactor; try { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTask.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTask.java index 9ccb93a0858ae..e5c29adeb9176 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTask.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTask.java @@ -7,13 +7,14 @@ package org.elasticsearch.xpack.inference.external.http.sender; -import org.elasticsearch.ElasticsearchTimeoutException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ListenerTimeouts; import org.elasticsearch.common.Strings; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.threadpool.ThreadPool; import java.util.Objects; @@ -64,7 +65,10 @@ private ActionListener getListener( threadPool.executor(UTILITY_THREAD_POOL_NAME), notificationListener, (ignored) -> notificationListener.onFailure( - new ElasticsearchTimeoutException(Strings.format("Request timed out waiting to be sent after [%s]", timeout)) + new ElasticsearchStatusException( + Strings.format("Request timed out waiting to be sent after [%s]", timeout), + 
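+ // REQUEST_TIMEOUT (408) surfaces the send-queue timeout to clients as a timeout rather than a generic server error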
RestStatus.REQUEST_TIMEOUT + ) ) ); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticMatchQueryRewriteInterceptor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticMatchQueryRewriteInterceptor.java new file mode 100644 index 0000000000000..a4a8123935c3e --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticMatchQueryRewriteInterceptor.java @@ -0,0 +1,95 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.queries; + +import org.elasticsearch.action.ResolvedIndices; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.InferenceFieldMetadata; +import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.index.mapper.IndexFieldMapper; +import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.MatchQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryRewriteContext; +import org.elasticsearch.index.query.TermQueryBuilder; +import org.elasticsearch.index.query.TermsQueryBuilder; +import org.elasticsearch.plugins.internal.rewriter.QueryRewriteInterceptor; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +public class SemanticMatchQueryRewriteInterceptor implements QueryRewriteInterceptor { + + public static final NodeFeature SEMANTIC_MATCH_QUERY_REWRITE_INTERCEPTION_SUPPORTED = new NodeFeature( + "search.semantic_match_query_rewrite_interception_supported" + ); + + public SemanticMatchQueryRewriteInterceptor() {} + + @Override + public QueryBuilder interceptAndRewrite(QueryRewriteContext context, QueryBuilder queryBuilder) { + assert (queryBuilder instanceof MatchQueryBuilder); + MatchQueryBuilder matchQueryBuilder = (MatchQueryBuilder) queryBuilder; + QueryBuilder rewritten = queryBuilder; + ResolvedIndices resolvedIndices = context.getResolvedIndices(); + if (resolvedIndices != null) { + Collection indexMetadataCollection = resolvedIndices.getConcreteLocalIndicesMetadata().values(); + List inferenceIndices = new ArrayList<>(); + List nonInferenceIndices = new ArrayList<>(); + for (IndexMetadata indexMetadata : indexMetadataCollection) { + String indexName = indexMetadata.getIndex().getName(); + InferenceFieldMetadata inferenceFieldMetadata = indexMetadata.getInferenceFields().get(matchQueryBuilder.fieldName()); + if (inferenceFieldMetadata != null) { + inferenceIndices.add(indexName); + } else { + nonInferenceIndices.add(indexName); + } + } + + if (inferenceIndices.isEmpty()) { + return rewritten; + } else if (nonInferenceIndices.isEmpty() == false) { + BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder(); + for (String inferenceIndexName : inferenceIndices) { + // Add a separate clause for each semantic query, because they may be using different inference endpoints + // TODO - consolidate this to a single clause once the semantic query supports multiple inference endpoints + boolQueryBuilder.should( + createSemanticSubQuery(inferenceIndexName, matchQueryBuilder.fieldName(), (String) matchQueryBuilder.value()) + ); + } + boolQueryBuilder.should(createMatchSubQuery(nonInferenceIndices, 
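+ // the original lexical query, scoped by createMatchSubQuery to the non-inference indices via a terms filter on _index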
matchQueryBuilder)); + rewritten = boolQueryBuilder; + } else { + rewritten = new SemanticQueryBuilder(matchQueryBuilder.fieldName(), (String) matchQueryBuilder.value(), false); + } + } + + return rewritten; + + } + + @Override + public String getQueryName() { + return MatchQueryBuilder.NAME; + } + + private QueryBuilder createSemanticSubQuery(String indexName, String fieldName, String value) { + BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder(); + boolQueryBuilder.must(new SemanticQueryBuilder(fieldName, value, true)); + boolQueryBuilder.filter(new TermQueryBuilder(IndexFieldMapper.NAME, indexName)); + return boolQueryBuilder; + } + + private QueryBuilder createMatchSubQuery(List indices, MatchQueryBuilder matchQueryBuilder) { + BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder(); + boolQueryBuilder.must(matchQueryBuilder); + boolQueryBuilder.filter(new TermsQueryBuilder(IndexFieldMapper.NAME, indices)); + return boolQueryBuilder; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java index d648db2fbfdbc..2a34651efcd9d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java @@ -46,6 +46,7 @@ import java.util.Objects; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; @@ -57,16 +58,18 @@ public class SemanticQueryBuilder extends AbstractQueryBuilder PARSER = new ConstructingObjectParser<>( NAME, false, - args -> new SemanticQueryBuilder((String) args[0], (String) args[1]) + args -> new SemanticQueryBuilder((String) args[0], (String) args[1], (Boolean) args[2]) ); static { PARSER.declareString(constructorArg(), FIELD_FIELD); PARSER.declareString(constructorArg(), QUERY_FIELD); + PARSER.declareBoolean(optionalConstructorArg(), LENIENT_FIELD); declareStandardFields(PARSER); } @@ -75,8 +78,13 @@ public class SemanticQueryBuilder extends AbstractQueryBuilder inferenceResultsSupplier; private final InferenceResults inferenceResults; private final boolean noInferenceResults; + private final Boolean lenient; public SemanticQueryBuilder(String fieldName, String query) { + this(fieldName, query, null); + } + + public SemanticQueryBuilder(String fieldName, String query, Boolean lenient) { if (fieldName == null) { throw new IllegalArgumentException("[" + NAME + "] requires a " + FIELD_FIELD.getPreferredName() + " value"); } @@ -88,6 +96,7 @@ public SemanticQueryBuilder(String fieldName, String query) { this.inferenceResults = null; this.inferenceResultsSupplier = null; this.noInferenceResults = false; + this.lenient = lenient; } public SemanticQueryBuilder(StreamInput in) throws IOException { @@ -97,6 +106,11 @@ public SemanticQueryBuilder(StreamInput in) throws IOException { this.inferenceResults = in.readOptionalNamedWriteable(InferenceResults.class); this.noInferenceResults = in.readBoolean(); this.inferenceResultsSupplier = null; + if (in.getTransportVersion().onOrAfter(TransportVersions.SEMANTIC_QUERY_LENIENT)) { + this.lenient = in.readOptionalBoolean(); + } else { + 
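+ // the peer predates the lenient flag, so nothing more is on the wire; default to strict behaviour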
this.lenient = null; + } } @Override @@ -108,6 +122,9 @@ protected void doWriteTo(StreamOutput out) throws IOException { out.writeString(query); out.writeOptionalNamedWriteable(inferenceResults); out.writeBoolean(noInferenceResults); + if (out.getTransportVersion().onOrAfter(TransportVersions.SEMANTIC_QUERY_LENIENT)) { + out.writeOptionalBoolean(lenient); + } } private SemanticQueryBuilder( @@ -123,6 +140,7 @@ private SemanticQueryBuilder( this.inferenceResultsSupplier = inferenceResultsSupplier; this.inferenceResults = inferenceResults; this.noInferenceResults = noInferenceResults; + this.lenient = other.lenient; } @Override @@ -144,6 +162,9 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep builder.startObject(NAME); builder.field(FIELD_FIELD.getPreferredName(), fieldName); builder.field(QUERY_FIELD.getPreferredName(), query); + if (lenient != null) { + builder.field(LENIENT_FIELD.getPreferredName(), lenient); + } boostAndQueryNameToXContent(builder); builder.endObject(); } @@ -171,6 +192,8 @@ private QueryBuilder doRewriteBuildSemanticQuery(SearchExecutionContext searchEx } return semanticTextFieldType.semanticQuery(inferenceResults, searchExecutionContext.requestSize(), boost(), queryName()); + } else if (lenient != null && lenient) { + return new MatchNoneQueryBuilder(); } else { throw new IllegalArgumentException( "Field [" + fieldName + "] of type [" + fieldType.typeName() + "] does not support " + NAME + " queries" diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java index fd2427dc8ac6a..f54696895a818 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java @@ -12,6 +12,7 @@ import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.license.LicenseUtils; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.rank.RankDoc; import org.elasticsearch.search.retriever.CompoundRetrieverBuilder; @@ -21,7 +22,6 @@ import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xpack.core.XPackPlugin; import java.io.IOException; import java.util.List; @@ -73,8 +73,11 @@ public class TextSimilarityRankRetrieverBuilder extends CompoundRetrieverBuilder RetrieverBuilder.declareBaseParserFields(TextSimilarityRankBuilder.NAME, PARSER); } - public static TextSimilarityRankRetrieverBuilder fromXContent(XContentParser parser, RetrieverParserContext context) - throws IOException { + public static TextSimilarityRankRetrieverBuilder fromXContent( + XContentParser parser, + RetrieverParserContext context, + XPackLicenseState licenceState + ) throws IOException { if (context.clusterSupportsFeature(TEXT_SIMILARITY_RERANKER_RETRIEVER_SUPPORTED) == false) { throw new ParsingException(parser.getTokenLocation(), "unknown retriever [" + TextSimilarityRankBuilder.NAME + "]"); } @@ -83,7 +86,7 @@ public static TextSimilarityRankRetrieverBuilder fromXContent(XContentParser par 
"[text_similarity_reranker] retriever composition feature is not supported by all nodes in the cluster" ); } - if (TextSimilarityRankBuilder.TEXT_SIMILARITY_RERANKER_FEATURE.check(XPackPlugin.getSharedLicenseState()) == false) { + if (TextSimilarityRankBuilder.TEXT_SIMILARITY_RERANKER_FEATURE.check(licenceState) == false) { throw LicenseUtils.newComplianceException(TextSimilarityRankBuilder.NAME); } return PARSER.apply(parser, context); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java index bc2daddc2a346..431a3647e2879 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java @@ -9,7 +9,9 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.xpack.core.ssl.SSLConfigurationSettings; +import java.util.ArrayList; import java.util.List; public class ElasticInferenceServiceSettings { @@ -17,6 +19,8 @@ public class ElasticInferenceServiceSettings { @Deprecated static final Setting EIS_GATEWAY_URL = Setting.simpleString("xpack.inference.eis.gateway.url", Setting.Property.NodeScope); + public static final String ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX = "xpack.inference.elastic.http.ssl."; + static final Setting ELASTIC_INFERENCE_SERVICE_URL = Setting.simpleString( "xpack.inference.elastic.url", Setting.Property.NodeScope @@ -31,11 +35,27 @@ public class ElasticInferenceServiceSettings { public ElasticInferenceServiceSettings(Settings settings) { eisGatewayUrl = EIS_GATEWAY_URL.get(settings); elasticInferenceServiceUrl = ELASTIC_INFERENCE_SERVICE_URL.get(settings); - } + public static final SSLConfigurationSettings ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_SETTINGS = SSLConfigurationSettings.withPrefix( + ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX, + false + ); + + public static final Setting ELASTIC_INFERENCE_SERVICE_SSL_ENABLED = Setting.boolSetting( + ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX + "enabled", + true, + Setting.Property.NodeScope + ); + public static List> getSettingsDefinitions() { - return List.of(EIS_GATEWAY_URL, ELASTIC_INFERENCE_SERVICE_URL); + ArrayList> settings = new ArrayList<>(); + settings.add(EIS_GATEWAY_URL); + settings.add(ELASTIC_INFERENCE_SERVICE_URL); + settings.add(ELASTIC_INFERENCE_SERVICE_SSL_ENABLED); + settings.addAll(ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_SETTINGS.getEnabledSettings()); + + return settings; } @Deprecated diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/cluster/metadata/SemanticTextClusterMetadataTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/cluster/metadata/SemanticTextClusterMetadataTests.java index bfec2d5ac3484..61033a0211065 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/cluster/metadata/SemanticTextClusterMetadataTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/cluster/metadata/SemanticTextClusterMetadataTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.xpack.core.XPackPlugin; 
import org.elasticsearch.xpack.inference.InferencePlugin; import org.hamcrest.Matchers; @@ -28,7 +29,7 @@ public class SemanticTextClusterMetadataTests extends ESSingleNodeTestCase { @Override protected Collection> getPlugins() { - return List.of(InferencePlugin.class); + return List.of(XPackPlugin.class, InferencePlugin.class); } public void testCreateIndexWithSemanticTextField() { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/InferencePluginTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/InferencePluginTests.java new file mode 100644 index 0000000000000..d1db5b8b12cc6 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/InferencePluginTests.java @@ -0,0 +1,65 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference; + +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceSettings; +import org.junit.After; +import org.junit.Before; + +import static org.hamcrest.Matchers.is; + +public class InferencePluginTests extends ESTestCase { + private InferencePlugin inferencePlugin; + + private Boolean elasticInferenceServiceEnabled = true; + + private void setElasticInferenceServiceEnabled(Boolean elasticInferenceServiceEnabled) { + this.elasticInferenceServiceEnabled = elasticInferenceServiceEnabled; + } + + @Before + public void setUp() throws Exception { + super.setUp(); + + Settings settings = Settings.builder().build(); + inferencePlugin = new InferencePlugin(settings) { + @Override + protected Boolean isElasticInferenceServiceEnabled() { + return elasticInferenceServiceEnabled; + } + }; + } + + @After + public void tearDown() throws Exception { + super.tearDown(); + } + + public void testElasticInferenceServiceSettingsPresent() throws Exception { + setElasticInferenceServiceEnabled(true); // enable elastic inference service + boolean anyMatch = inferencePlugin.getSettings() + .stream() + .map(Setting::getKey) + .anyMatch(key -> key.startsWith(ElasticInferenceServiceSettings.ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX)); + + assertThat("xpack.inference.elastic settings are present", anyMatch, is(true)); + } + + public void testElasticInferenceServiceSettingsNotPresent() throws Exception { + setElasticInferenceServiceEnabled(false); // disable elastic inference service + boolean noneMatch = inferencePlugin.getSettings() + .stream() + .map(Setting::getKey) + .noneMatch(key -> key.startsWith(ElasticInferenceServiceSettings.ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX)); + + assertThat("xpack.inference.elastic settings are not present", noneMatch, is(true)); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/LocalStateInferencePlugin.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/LocalStateInferencePlugin.java new file mode 100644 index 0000000000000..68ea175bd9870 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/LocalStateInferencePlugin.java @@ -0,0 +1,71 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference; + +import org.elasticsearch.action.support.MappedActionFilter; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.inference.InferenceServiceExtension; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.plugins.SearchPlugin; +import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; +import org.elasticsearch.xpack.core.ssl.SSLService; +import org.elasticsearch.xpack.inference.mock.TestDenseInferenceServiceExtension; +import org.elasticsearch.xpack.inference.mock.TestSparseInferenceServiceExtension; + +import java.nio.file.Path; +import java.util.Collection; +import java.util.List; +import java.util.Map; + +import static java.util.stream.Collectors.toList; + +public class LocalStateInferencePlugin extends LocalStateCompositeXPackPlugin { + private final InferencePlugin inferencePlugin; + + public LocalStateInferencePlugin(final Settings settings, final Path configPath) throws Exception { + super(settings, configPath); + LocalStateInferencePlugin thisVar = this; + this.inferencePlugin = new InferencePlugin(settings) { + @Override + protected SSLService getSslService() { + return thisVar.getSslService(); + } + + @Override + protected XPackLicenseState getLicenseState() { + return thisVar.getLicenseState(); + } + + @Override + public List getInferenceServiceFactories() { + return List.of( + TestSparseInferenceServiceExtension.TestInferenceService::new, + TestDenseInferenceServiceExtension.TestInferenceService::new + ); + } + }; + plugins.add(inferencePlugin); + } + + @Override + public List> getRetrievers() { + return this.filterPlugins(SearchPlugin.class).stream().flatMap(p -> p.getRetrievers().stream()).collect(toList()); + } + + @Override + public Map getMappers() { + return inferencePlugin.getMappers(); + } + + @Override + public Collection getMappedActionFilters() { + return inferencePlugin.getMappedActionFilters(); + } + +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/Utils.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/Utils.java index 9395ae222e9ba..0f322e64755be 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/Utils.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/Utils.java @@ -14,7 +14,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; -import org.elasticsearch.inference.InferenceServiceExtension; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; @@ -143,20 +142,6 @@ private static void blockingCall( latch.await(); } - public static class TestInferencePlugin extends InferencePlugin { - public TestInferencePlugin(Settings settings) { - super(settings); - } - - @Override - public List getInferenceServiceFactories() { - return List.of( - TestSparseInferenceServiceExtension.TestInferenceService::new, - TestDenseInferenceServiceExtension.TestInferenceService::new - ); - } - } - public static Model getInvalidModel(String inferenceEntityId, String serviceName) { var mockConfigs = 
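// Mockito mock; only the getters the error-path tests consult are stubbed below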
mock(ModelConfigurations.class); when(mockConfigs.getInferenceEntityId()).thenReturn(inferenceEntityId); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java index 2416aeb62ff33..c68a629b999c5 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java @@ -102,7 +102,7 @@ public void testFilterNoop() throws Exception { new BulkItemRequest[0] ); request.setInferenceFieldMap( - Map.of("foo", new InferenceFieldMetadata("foo", "bar", generateRandomStringArray(5, 10, false, false))) + Map.of("foo", new InferenceFieldMetadata("foo", "bar", "baz", generateRandomStringArray(5, 10, false, false))) ); filter.apply(task, TransportShardBulkAction.ACTION_NAME, request, actionListener, actionFilterChain); awaitLatch(chainExecuted, 10, TimeUnit.SECONDS); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderTests.java index 79f6aa8164b75..b3e7db6009204 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderTests.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.inference.external.http.sender; import org.apache.http.HttpHeaders; -import org.elasticsearch.ElasticsearchTimeoutException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -162,12 +162,13 @@ public void testHttpRequestSender_Throws_WhenATimeoutOccurs() throws Exception { PlainActionFuture listener = new PlainActionFuture<>(); sender.send(RequestManagerTests.createMock(), new DocumentsOnlyInput(List.of()), TimeValue.timeValueNanos(1), listener); - var thrownException = expectThrows(ElasticsearchTimeoutException.class, () -> listener.actionGet(TIMEOUT)); + var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); assertThat( thrownException.getMessage(), is(format("Request timed out waiting to be sent after [%s]", TimeValue.timeValueNanos(1))) ); + assertThat(thrownException.status().getStatus(), is(408)); } } @@ -187,12 +188,13 @@ public void testHttpRequestSenderWithTimeout_Throws_WhenATimeoutOccurs() throws PlainActionFuture listener = new PlainActionFuture<>(); sender.send(RequestManagerTests.createMock(), new DocumentsOnlyInput(List.of()), TimeValue.timeValueNanos(1), listener); - var thrownException = expectThrows(ElasticsearchTimeoutException.class, () -> listener.actionGet(TIMEOUT)); + var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); assertThat( thrownException.getMessage(), is(format("Request timed out waiting to be sent after [%s]", TimeValue.timeValueNanos(1))) ); + assertThat(thrownException.status().getStatus(), is(408)); } } diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceTests.java index e09e4968571e5..7e29fad56812d 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceTests.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.inference.external.http.sender; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.ElasticsearchTimeoutException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.Strings; @@ -238,12 +238,13 @@ public void testExecute_CallsOnFailure_WhenRequestTimesOut() { var listener = new PlainActionFuture(); service.execute(RequestManagerTests.createMock(), new DocumentsOnlyInput(List.of()), TimeValue.timeValueNanos(1), listener); - var thrownException = expectThrows(ElasticsearchTimeoutException.class, () -> listener.actionGet(TIMEOUT)); + var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); assertThat( thrownException.getMessage(), is(format("Request timed out waiting to be sent after [%s]", TimeValue.timeValueNanos(1))) ); + assertThat(thrownException.status().getStatus(), is(408)); } public void testExecute_PreservesThreadContext() throws InterruptedException, ExecutionException, TimeoutException { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTaskTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTaskTests.java index c839c266e9320..e37a1a213569e 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTaskTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTaskTests.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.inference.external.http.sender; -import org.elasticsearch.ElasticsearchTimeoutException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.core.TimeValue; @@ -86,13 +86,14 @@ public void testRequest_ReturnsTimeoutException() { listener ); - var thrownException = expectThrows(ElasticsearchTimeoutException.class, () -> listener.actionGet(TIMEOUT)); + var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); assertThat( thrownException.getMessage(), is(format("Request timed out waiting to be sent after [%s]", TimeValue.timeValueMillis(1))) ); assertTrue(requestTask.hasCompleted()); assertTrue(requestTask.getRequestCompletedFunction().get()); + assertThat(thrownException.status().getStatus(), is(408)); } public void testRequest_DoesNotCallOnFailureTwiceWhenTimingOut() throws Exception { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextNonDynamicFieldMapperTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextNonDynamicFieldMapperTests.java index 1f58c4165056d..24183b21f73e7 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextNonDynamicFieldMapperTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextNonDynamicFieldMapperTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.index.mapper.NonDynamicFieldMapperTests; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.xpack.inference.LocalStateInferencePlugin; import org.elasticsearch.xpack.inference.Utils; import org.elasticsearch.xpack.inference.mock.TestSparseInferenceServiceExtension; import org.junit.Before; @@ -26,7 +27,7 @@ public void setup() throws Exception { @Override protected Collection> getPlugins() { - return List.of(Utils.TestInferencePlugin.class); + return List.of(LocalStateInferencePlugin.class); } @Override diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankMultiNodeTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankMultiNodeTests.java index 6d6403b69ea11..daed03c198e0d 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankMultiNodeTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankMultiNodeTests.java @@ -10,7 +10,7 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.rank.RankBuilder; import org.elasticsearch.search.rank.rerank.AbstractRerankerIT; -import org.elasticsearch.xpack.inference.InferencePlugin; +import org.elasticsearch.xpack.inference.LocalStateInferencePlugin; import java.util.Collection; import java.util.List; @@ -40,7 +40,7 @@ protected RankBuilder getThrowingRankBuilder(int rankWindowSize, String rankFeat @Override protected Collection> pluginsNeeded() { - return List.of(InferencePlugin.class, TextSimilarityTestPlugin.class); + return List.of(LocalStateInferencePlugin.class, TextSimilarityTestPlugin.class); } public void testQueryPhaseShardThrowingAllShardsFail() throws Exception { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverTelemetryTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverTelemetryTests.java index 084a7f3de4a53..ba6924ba0ff3b 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverTelemetryTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverTelemetryTests.java @@ -24,8 +24,7 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; -import org.elasticsearch.xpack.core.XPackPlugin; -import org.elasticsearch.xpack.inference.InferencePlugin; +import org.elasticsearch.xpack.inference.LocalStateInferencePlugin; import org.junit.Before; import java.io.IOException; @@ -47,7 +46,7 @@ protected boolean addMockHttpTransport() { @Override protected Collection> nodePlugins() { - return List.of(InferencePlugin.class, XPackPlugin.class, TextSimilarityTestPlugin.class); + return 
List.of(LocalStateInferencePlugin.class, TextSimilarityTestPlugin.class); } @Override diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankTests.java index a042fca44fdb5..f81f2965c392e 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankTests.java @@ -20,7 +20,7 @@ import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; import org.elasticsearch.xpack.core.inference.action.InferenceAction; -import org.elasticsearch.xpack.inference.InferencePlugin; +import org.elasticsearch.xpack.inference.LocalStateInferencePlugin; import org.junit.Before; import java.util.Collection; @@ -108,7 +108,7 @@ protected InferenceAction.Request generateRequest(List docFeatures) { @Override protected Collection> getPlugins() { - return List.of(InferencePlugin.class, TextSimilarityTestPlugin.class); + return List.of(LocalStateInferencePlugin.class, TextSimilarityTestPlugin.class); } @Before diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml index c2704a4c22914..3d3790d879ef1 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml @@ -101,7 +101,7 @@ setup: index: test-sparse-index id: doc_1 body: - inference_field: ["inference test", "another inference test"] + inference_field: [ "inference test", "another inference test" ] non_inference_field: "non inference test" refresh: true @@ -132,7 +132,7 @@ setup: index: test-sparse-index id: doc_1 body: - inference_field: [40, 49.678] + inference_field: [ 40, 49.678 ] refresh: true - do: @@ -229,7 +229,7 @@ setup: index: test-dense-index id: doc_1 body: - inference_field: ["inference test", "another inference test"] + inference_field: [ "inference test", "another inference test" ] non_inference_field: "non inference test" refresh: true @@ -260,7 +260,7 @@ setup: index: test-dense-index id: doc_1 body: - inference_field: [45.1, 100] + inference_field: [ 45.1, 100 ] refresh: true - do: @@ -387,7 +387,7 @@ setup: index: test-dense-index id: doc_1 body: - inference_field: ["inference test", "another inference test"] + inference_field: [ "inference test", "another inference test" ] non_inference_field: "non inference test" refresh: true @@ -418,7 +418,7 @@ setup: index: test-sparse-index id: doc_1 body: - inference_field: ["inference test", "another inference test"] + inference_field: [ "inference test", "another inference test" ] non_inference_field: "non inference test" refresh: true @@ -440,7 +440,7 @@ setup: - match: { hits.hits.0._id: "doc_1" } - close_to: { hits.hits.0._score: { value: 3.783733e19, error: 1e13 } } - length: { hits.hits.0._source.inference_field.inference.chunks: 2 } - - match: { hits.hits.0.matched_queries: ["i-like-naming-my-queries"] } + - match: { hits.hits.0.matched_queries: [ "i-like-naming-my-queries" ] } --- "Query an index alias": @@ -452,7 +452,7 @@ setup: 
index: test-sparse-index id: doc_1 body: - inference_field: ["inference test", "another inference test"] + inference_field: [ "inference test", "another inference test" ] non_inference_field: "non inference test" refresh: true @@ -503,6 +503,48 @@ setup: - match: { error.root_cause.0.type: "illegal_argument_exception" } - match: { error.root_cause.0.reason: "Field [non_inference_field] of type [text] does not support semantic queries" } +--- +"Query the wrong field type with lenient: true": + - requires: + cluster_features: "search.semantic_match_query_rewrite_interception_supported" + reason: lenient introduced in 8.18.0 + + - do: + index: + index: test-sparse-index + id: doc_1 + body: + inference_field: "inference test" + non_inference_field: "non inference test" + refresh: true + + - do: + catch: bad_request + search: + index: test-sparse-index + body: + query: + semantic: + field: "non_inference_field" + query: "inference test" + + - match: { error.type: "search_phase_execution_exception" } + - match: { error.root_cause.0.type: "illegal_argument_exception" } + - match: { error.root_cause.0.reason: "Field [non_inference_field] of type [text] does not support semantic queries" } + + - do: + search: + index: test-sparse-index + body: + query: + semantic: + field: "non_inference_field" + query: "inference test" + lenient: true + + - match: { hits.total.value: 0 } + + --- "Query a missing field": - do: @@ -783,7 +825,7 @@ setup: index: test-dense-index id: doc_1 body: - inference_field: ["inference test", "another inference test"] + inference_field: [ "inference test", "another inference test" ] non_inference_field: "non inference test" refresh: true @@ -844,11 +886,11 @@ setup: "Query a field that uses the default ELSER 2 endpoint": - requires: reason: "default ELSER 2 inference ID is enabled via a capability" - test_runner_features: [capabilities] + test_runner_features: [ capabilities ] capabilities: - method: GET path: /_inference - capabilities: [default_elser_2] + capabilities: [ default_elser_2 ] - do: indices.create: diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/45_semantic_text_match.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/45_semantic_text_match.yml new file mode 100644 index 0000000000000..cdbf73d31a272 --- /dev/null +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/45_semantic_text_match.yml @@ -0,0 +1,284 @@ +setup: + - requires: + cluster_features: "search.semantic_match_query_rewrite_interception_supported" + reason: semantic_text match support introduced in 8.18.0 + + - do: + inference.put: + task_type: sparse_embedding + inference_id: sparse-inference-id + body: > + { + "service": "test_service", + "service_settings": { + "model": "my_model", + "api_key": "abc64" + }, + "task_settings": { + } + } + + - do: + inference.put: + task_type: sparse_embedding + inference_id: sparse-inference-id-2 + body: > + { + "service": "test_service", + "service_settings": { + "model": "my_model", + "api_key": "abc64" + }, + "task_settings": { + } + } + + - do: + inference.put: + task_type: text_embedding + inference_id: dense-inference-id + body: > + { + "service": "text_embedding_test_service", + "service_settings": { + "model": "my_model", + "dimensions": 10, + "api_key": "abc64", + "similarity": "COSINE" + }, + "task_settings": { + } + } + + - do: + indices.create: + index: test-sparse-index + body: + mappings: + properties: + inference_field: + type: 
semantic_text + inference_id: sparse-inference-id + non_inference_field: + type: text + + - do: + indices.create: + index: test-dense-index + body: + mappings: + properties: + inference_field: + type: semantic_text + inference_id: dense-inference-id + non_inference_field: + type: text + + - do: + indices.create: + index: test-text-only-index + body: + mappings: + properties: + inference_field: + type: text + non_inference_field: + type: text + +--- +"Query using a sparse embedding model": + - skip: + features: [ "headers", "close_to" ] + + - do: + index: + index: test-sparse-index + id: doc_1 + body: + inference_field: [ "inference test", "another inference test" ] + non_inference_field: "non inference test" + refresh: true + + - do: + headers: + # Force JSON content type so that we use a parser that interprets the floating-point score as a double + Content-Type: application/json + search: + index: test-sparse-index + body: + query: + match: + inference_field: + query: "inference test" + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + +--- +"Query using a dense embedding model": + - skip: + features: [ "headers", "close_to" ] + + - do: + index: + index: test-dense-index + id: doc_1 + body: + inference_field: [ "inference test", "another inference test" ] + non_inference_field: "non inference test" + refresh: true + + - do: + headers: + # Force JSON content type so that we use a parser that interprets the floating-point score as a double + Content-Type: application/json + search: + index: test-dense-index + body: + query: + match: + inference_field: + query: "inference test" + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + +--- +"Query an index alias": + - skip: + features: [ "headers", "close_to" ] + + - do: + index: + index: test-sparse-index + id: doc_1 + body: + inference_field: [ "inference test", "another inference test" ] + non_inference_field: "non inference test" + refresh: true + + - do: + indices.put_alias: + index: test-sparse-index + name: my-alias + + - do: + headers: + # Force JSON content type so that we use a parser that interprets the floating-point score as a double + Content-Type: application/json + search: + index: my-alias + body: + query: + match: + inference_field: + query: "inference test" + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + +--- +"Query indices with both semantic_text and regular text content": + + - do: + index: + index: test-sparse-index + id: doc_1 + body: + inference_field: [ "inference test", "another inference test" ] + non_inference_field: "non inference test" + refresh: true + + - do: + index: + index: test-text-only-index + id: doc_2 + body: + inference_field: [ "inference test", "not an inference field" ] + non_inference_field: "non inference test" + refresh: true + + - do: + search: + index: + - test-sparse-index + - test-text-only-index + body: + query: + match: + inference_field: + query: "inference test" + + - match: { hits.total.value: 2 } + - match: { hits.hits.0._id: "doc_1" } + - match: { hits.hits.1._id: "doc_2" } + + # Test querying multiple indices that either use the same inference ID or combine semantic_text with lexical search + - do: + indices.create: + index: test-sparse-index-2 + body: + mappings: + properties: + inference_field: + type: semantic_text + inference_id: sparse-inference-id + non_inference_field: + type: text + + - do: + index: + index: test-sparse-index-2 + id: doc_3 + body: + inference_field: "another inference test" + 
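+          # doc_3 lives in test-sparse-index-2, which reuses sparse-inference-id from the setup above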
refresh: true + + - do: + search: + index: + - test-sparse-index* + - test-text-only-index + body: + query: + match: + inference_field: + query: "inference test" + + - match: { hits.total.value: 3 } + - match: { hits.hits.0._id: "doc_1" } + - match: { hits.hits.1._id: "doc_3" } + - match: { hits.hits.2._id: "doc_2" } + +--- +"Query a field that has no indexed inference results": + - skip: + features: [ "headers" ] + + - do: + headers: + # Force JSON content type so that we use a parser that interprets the floating-point score as a double + Content-Type: application/json + search: + index: test-sparse-index + body: + query: + match: + inference_field: + query: "inference test" + + - match: { hits.total.value: 0 } + + - do: + headers: + # Force JSON content type so that we use a parser that interprets the floating-point score as a double + Content-Type: application/json + search: + index: test-dense-index + body: + query: + match: + inference_field: + query: "inference test" + + - match: { hits.total.value: 0 } diff --git a/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamTransportActionIT.java b/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamTransportActionIT.java index 7f2243ed76849..6e24e644cb2af 100644 --- a/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamTransportActionIT.java +++ b/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamTransportActionIT.java @@ -117,6 +117,23 @@ public void testAlreadyUpToDateDataStream() throws Exception { assertThat(status.totalIndices(), equalTo(backingIndexCount)); assertThat(status.totalIndicesToBeUpgraded(), equalTo(0)); }); + AcknowledgedResponse cancelResponse = client().execute( + CancelReindexDataStreamAction.INSTANCE, + new CancelReindexDataStreamAction.Request(dataStreamName) + ).actionGet(); + assertNotNull(cancelResponse); + assertThrows( + ResourceNotFoundException.class, + () -> client().execute(CancelReindexDataStreamAction.INSTANCE, new CancelReindexDataStreamAction.Request(dataStreamName)) + .actionGet() + ); + assertThrows( + ResourceNotFoundException.class, + () -> client().execute( + new ActionType(GetMigrationReindexStatusAction.NAME), + new GetMigrationReindexStatusAction.Request(dataStreamName) + ).actionGet() + ); } private int createDataStream(String dataStreamName) { diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/MigratePlugin.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/MigratePlugin.java index 1af66a2c61d56..26f8e57102a4d 100644 --- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/MigratePlugin.java +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/MigratePlugin.java @@ -32,10 +32,13 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xpack.migrate.action.CancelReindexDataStreamAction; +import org.elasticsearch.xpack.migrate.action.CancelReindexDataStreamTransportAction; import org.elasticsearch.xpack.migrate.action.GetMigrationReindexStatusAction; import org.elasticsearch.xpack.migrate.action.GetMigrationReindexStatusTransportAction; import org.elasticsearch.xpack.migrate.action.ReindexDataStreamAction; import 
org.elasticsearch.xpack.migrate.action.ReindexDataStreamTransportAction; +import org.elasticsearch.xpack.migrate.rest.RestCancelReindexDataStreamAction; import org.elasticsearch.xpack.migrate.rest.RestGetMigrationReindexStatusAction; import org.elasticsearch.xpack.migrate.rest.RestMigrationReindexAction; import org.elasticsearch.xpack.migrate.task.ReindexDataStreamPersistentTaskExecutor; @@ -69,6 +72,7 @@ public List getRestHandlers( if (REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()) { handlers.add(new RestMigrationReindexAction()); handlers.add(new RestGetMigrationReindexStatusAction()); + handlers.add(new RestCancelReindexDataStreamAction()); } return handlers; } @@ -79,6 +83,7 @@ public List getRestHandlers( if (REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()) { actions.add(new ActionHandler<>(ReindexDataStreamAction.INSTANCE, ReindexDataStreamTransportAction.class)); actions.add(new ActionHandler<>(GetMigrationReindexStatusAction.INSTANCE, GetMigrationReindexStatusTransportAction.class)); + actions.add(new ActionHandler<>(CancelReindexDataStreamAction.INSTANCE, CancelReindexDataStreamTransportAction.class)); } return actions; } diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/CancelReindexDataStreamAction.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/CancelReindexDataStreamAction.java new file mode 100644 index 0000000000000..635d8b8f30978 --- /dev/null +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/CancelReindexDataStreamAction.java @@ -0,0 +1,90 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.migrate.action; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; +import java.util.Objects; + +public class CancelReindexDataStreamAction extends ActionType { + + public static final CancelReindexDataStreamAction INSTANCE = new CancelReindexDataStreamAction(); + public static final String NAME = "indices:admin/data_stream/reindex_cancel"; + + public CancelReindexDataStreamAction() { + super(NAME); + } + + public static class Request extends ActionRequest implements IndicesRequest { + private final String index; + + public Request(String index) { + super(); + this.index = index; + } + + public Request(StreamInput in) throws IOException { + super(in); + this.index = in.readString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(index); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public boolean getShouldStoreResult() { + return true; + } + + public String getIndex() { + return index; + } + + @Override + public int hashCode() { + return Objects.hashCode(index); + } + + @Override + public boolean equals(Object other) { + return other instanceof Request && index.equals(((Request) other).index); + } + + public Request nodeRequest(String thisNodeId, long thisTaskId) { + Request copy = new Request(index); + copy.setParentTask(thisNodeId, thisTaskId); + return copy; + } + + @Override + public String[] indices() { + return new String[] { index }; + } + + @Override + public IndicesOptions indicesOptions() { + return IndicesOptions.strictSingleIndexNoExpandForbidClosed(); + } + } +} diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/CancelReindexDataStreamTransportAction.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/CancelReindexDataStreamTransportAction.java new file mode 100644 index 0000000000000..00a846bf7eb9a --- /dev/null +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/CancelReindexDataStreamTransportAction.java @@ -0,0 +1,57 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.migrate.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.injection.guice.Inject; +import org.elasticsearch.persistent.PersistentTasksCustomMetadata; +import org.elasticsearch.persistent.PersistentTasksService; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.migrate.action.CancelReindexDataStreamAction.Request; + +public class CancelReindexDataStreamTransportAction extends HandledTransportAction<Request, AcknowledgedResponse> { + private final PersistentTasksService persistentTasksService; + + @Inject + public CancelReindexDataStreamTransportAction( + TransportService transportService, + ActionFilters actionFilters, + PersistentTasksService persistentTasksService + ) { + super(CancelReindexDataStreamAction.NAME, transportService, actionFilters, Request::new, EsExecutors.DIRECT_EXECUTOR_SERVICE); + this.persistentTasksService = persistentTasksService; + } + + @Override + protected void doExecute(Task task, Request request, ActionListener<AcknowledgedResponse> listener) { + String index = request.getIndex(); + String persistentTaskId = ReindexDataStreamAction.TASK_ID_PREFIX + index; + /* + * This removes the persistent task from the cluster state and results in the running task being cancelled (but not removed from + * the task manager). The running task is removed from the task manager in ReindexDataStreamTask::onCancelled, which is called as + * a result of this. + */ + persistentTasksService.sendRemoveRequest(persistentTaskId, TimeValue.MAX_VALUE, new ActionListener<>() { + @Override + public void onResponse(PersistentTasksCustomMetadata.PersistentTask<?> persistentTask) { + listener.onResponse(AcknowledgedResponse.TRUE); + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }); + } +}
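At the REST layer the same cancellation is exposed through the route registered by RestCancelReindexDataStreamAction below; a hypothetical low-level client call (the RestClient setup is assumed and not shown here):

import java.io.IOException;

import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

// Hypothetical low-level REST call against the route registered below; a
// successful response body is {"acknowledged": true}.
static Response cancelViaRest(RestClient restClient, String dataStream) throws IOException {
    return restClient.performRequest(new Request("POST", "/_migration/reindex/" + dataStream + "/_cancel"));
}

diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/rest/RestCancelReindexDataStreamAction.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/rest/RestCancelReindexDataStreamAction.java new file mode 100644 index 0000000000000..0bd68e8b2df73 --- /dev/null +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/rest/RestCancelReindexDataStreamAction.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.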
+ */ + +package org.elasticsearch.xpack.migrate.rest; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.migrate.action.CancelReindexDataStreamAction; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.POST; + +public class RestCancelReindexDataStreamAction extends BaseRestHandler { + + @Override + public String getName() { + return "cancel_reindex_data_stream_action"; + } + + @Override + public List<Route> routes() { + return List.of(new Route(POST, "/_migration/reindex/{index}/_cancel")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + String index = request.param("index"); + CancelReindexDataStreamAction.Request cancelTaskRequest = new CancelReindexDataStreamAction.Request(index); + return channel -> client.execute(CancelReindexDataStreamAction.INSTANCE, cancelTaskRequest, new RestToXContentListener<>(channel)); + } +} diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamPersistentTaskExecutor.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamPersistentTaskExecutor.java index 7ec5014b9edff..176220a1ccae8 100644 --- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamPersistentTaskExecutor.java +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamPersistentTaskExecutor.java @@ -91,13 +91,11 @@ protected void nodeOperation(AllocatedPersistentTask task, ReindexDataStreamTask } private void completeSuccessfulPersistentTask(ReindexDataStreamTask persistentTask) { - persistentTask.allReindexesCompleted(); - threadPool.schedule(persistentTask::markAsCompleted, getTimeToLive(persistentTask), threadPool.generic()); + persistentTask.allReindexesCompleted(threadPool, getTimeToLive(persistentTask)); } private void completeFailedPersistentTask(ReindexDataStreamTask persistentTask, Exception e) { - persistentTask.taskFailed(e); - threadPool.schedule(() -> persistentTask.markAsFailed(e), getTimeToLive(persistentTask), threadPool.generic()); + persistentTask.taskFailed(threadPool, getTimeToLive(persistentTask), e); } private TimeValue getTimeToLive(ReindexDataStreamTask reindexDataStreamTask) { diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamTask.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamTask.java index 72ddb87e9dea5..844f24f45ab77 100644 --- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamTask.java +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamTask.java @@ -7,9 +7,12 @@ package org.elasticsearch.xpack.migrate.task; +import org.elasticsearch.common.util.concurrent.RunOnce; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.persistent.AllocatedPersistentTask; import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.threadpool.ThreadPool; import java.util.ArrayList; import java.util.List; @@ -21,12 +24,14 @@ public class ReindexDataStreamTask extends AllocatedPersistentTask { private final long persistentTaskStartTime; private final int
totalIndices; private final int totalIndicesToBeUpgraded; - private boolean complete = false; - private Exception exception; - private AtomicInteger inProgress = new AtomicInteger(0); - private AtomicInteger pending = new AtomicInteger(); - private List<Tuple<String, Exception>> errors = new ArrayList<>(); + private volatile boolean complete = false; + private volatile Exception exception; + private final AtomicInteger inProgress = new AtomicInteger(0); + private final AtomicInteger pending = new AtomicInteger(); + private final List<Tuple<String, Exception>> errors = new ArrayList<>(); + private final RunOnce completeTask; + @SuppressWarnings("this-escape") public ReindexDataStreamTask( long persistentTaskStartTime, int totalIndices, @@ -42,6 +47,13 @@ public ReindexDataStreamTask( this.persistentTaskStartTime = persistentTaskStartTime; this.totalIndices = totalIndices; this.totalIndicesToBeUpgraded = totalIndicesToBeUpgraded; + this.completeTask = new RunOnce(() -> { + if (exception == null) { + markAsCompleted(); + } else { + markAsFailed(exception); + } + }); } @Override @@ -58,13 +70,18 @@ public ReindexDataStreamStatus getStatus() { ); } - public void allReindexesCompleted() { + public void allReindexesCompleted(ThreadPool threadPool, TimeValue timeToLive) { this.complete = true; + if (isCancelled()) { + completeTask.run(); + } else { + threadPool.schedule(completeTask, timeToLive, threadPool.generic()); + } } - public void taskFailed(Exception e) { - this.complete = true; + public void taskFailed(ThreadPool threadPool, TimeValue timeToLive, Exception e) { this.exception = e; + allReindexesCompleted(threadPool, timeToLive); } public void reindexSucceeded() { @@ -84,4 +101,16 @@ public void incrementInProgressIndicesCount() { public void setPendingIndicesCount(int size) { pending.set(size); } + + @Override + public void onCancelled() { + /* + * If the task is complete, but just waiting for its scheduled removal, we go ahead and call markAsCompleted/markAsFailed + * immediately. This results in the running task being removed from the task manager. If the task is not complete, then one of + * allReindexesCompleted or taskFailed will be called in the future, resulting in the same thing. + */ + if (complete) { + completeTask.run(); + } + } }
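Both completion triggers above (the TTL-scheduled removal and onCancelled) funnel through a single RunOnce, so markAsCompleted/markAsFailed can never run twice. A minimal standalone analogue of that idempotency, not the real org.elasticsearch.common.util.concurrent.RunOnce implementation:

import java.util.concurrent.atomic.AtomicBoolean;

// Minimal standalone analogue of the RunOnce idempotency relied on above: both
// the TTL-scheduled completion and onCancelled() may invoke run(), but only the
// first caller executes the wrapped completion logic.
final class RunOnceSketch implements Runnable {
    private final Runnable delegate;
    private final AtomicBoolean hasRun = new AtomicBoolean();

    RunOnceSketch(Runnable delegate) {
        this.delegate = delegate;
    }

    @Override
    public void run() {
        if (hasRun.compareAndSet(false, true)) {
            delegate.run(); // e.g. markAsCompleted() or markAsFailed(exception)
        }
    }
}

diff --git a/x-pack/plugin/migrate/src/test/java/org/elasticsearch/xpack/migrate/action/CancelReindexDataStreamRequestTests.java b/x-pack/plugin/migrate/src/test/java/org/elasticsearch/xpack/migrate/action/CancelReindexDataStreamRequestTests.java new file mode 100644 index 0000000000000..187561dae19b0 --- /dev/null +++ b/x-pack/plugin/migrate/src/test/java/org/elasticsearch/xpack/migrate/action/CancelReindexDataStreamRequestTests.java @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.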
+ */ + +package org.elasticsearch.xpack.migrate.action; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.migrate.action.CancelReindexDataStreamAction.Request; + +import java.io.IOException; + +public class CancelReindexDataStreamRequestTests extends AbstractWireSerializingTestCase<Request> { + + @Override + protected Writeable.Reader<Request> instanceReader() { + return Request::new; + } + + @Override + protected Request createTestInstance() { + return new Request(randomAlphaOfLength(30)); + } + + @Override + protected Request mutateInstance(Request instance) throws IOException { + return new Request(instance.getIndex() + randomAlphaOfLength(5)); + } +} diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnomalyJobCRUDIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnomalyJobCRUDIT.java index 08fda90f9fd73..8fe87b043c78b 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnomalyJobCRUDIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnomalyJobCRUDIT.java @@ -195,7 +195,7 @@ public void testOpenJobWithOldSnapshot() { assertThat( ex.getMessage(), containsString( - "[open-job-with-old-model-snapshot] job model snapshot [snap_1] has min version before [7.0.0], " + "[open-job-with-old-model-snapshot] job model snapshot [snap_1] has min version before [8.3.0], " + "please revert to a newer model snapshot or reset the job" ) ); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java index bd628c4e04ac6..6da5a110defbf 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java @@ -58,8 +58,7 @@ import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; -import static org.elasticsearch.xpack.core.ml.MachineLearningField.MIN_CHECKED_SUPPORTED_SNAPSHOT_VERSION; -import static org.elasticsearch.xpack.core.ml.MachineLearningField.MIN_REPORTED_SUPPORTED_SNAPSHOT_VERSION; +import static org.elasticsearch.xpack.core.ml.MachineLearningField.MIN_SUPPORTED_SNAPSHOT_VERSION; import static org.elasticsearch.xpack.ml.job.task.OpenJobPersistentTasksExecutor.checkAssignmentState; /* @@ -214,7 +213,7 @@ public void onFailure(Exception e) { return; } assert modelSnapshot.getPage().results().size() == 1; - if (modelSnapshot.getPage().results().get(0).getMinVersion().onOrAfter(MIN_CHECKED_SUPPORTED_SNAPSHOT_VERSION)) { + if (modelSnapshot.getPage().results().get(0).getMinVersion().onOrAfter(MIN_SUPPORTED_SNAPSHOT_VERSION)) { modelSnapshotValidationListener.onResponse(true); return; } @@ -224,7 +223,7 @@ public void onFailure(Exception e) { + "please revert to a newer model snapshot or reset the job", jobParams.getJobId(), jobParams.getJob().getModelSnapshotId(), - MIN_REPORTED_SUPPORTED_SNAPSHOT_VERSION.toString() + MIN_SUPPORTED_SNAPSHOT_VERSION.toString() ) ); }, failure -> { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java index
89180cba77dfd..9c37ebc0abfd8 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java @@ -73,8 +73,7 @@ import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; -import static org.elasticsearch.xpack.core.ml.MachineLearningField.MIN_CHECKED_SUPPORTED_SNAPSHOT_VERSION; -import static org.elasticsearch.xpack.core.ml.MachineLearningField.MIN_REPORTED_SUPPORTED_SNAPSHOT_VERSION; +import static org.elasticsearch.xpack.core.ml.MachineLearningField.MIN_SUPPORTED_SNAPSHOT_VERSION; import static org.elasticsearch.xpack.core.ml.MlTasks.AWAITING_UPGRADE; import static org.elasticsearch.xpack.core.ml.MlTasks.PERSISTENT_TASK_MASTER_NODE_TIMEOUT; import static org.elasticsearch.xpack.ml.job.JobNodeSelector.AWAITING_LAZY_ASSIGNMENT; @@ -436,7 +435,7 @@ private void verifyCurrentSnapshotVersion(String jobId, ActionListener<Boolean> } assert snapshot.getPage().results().size() == 1; ModelSnapshot snapshotObj = snapshot.getPage().results().get(0); - if (snapshotObj.getMinVersion().onOrAfter(MIN_CHECKED_SUPPORTED_SNAPSHOT_VERSION)) { + if (snapshotObj.getMinVersion().onOrAfter(MIN_SUPPORTED_SNAPSHOT_VERSION)) { listener.onResponse(true); return; } @@ -446,7 +445,7 @@ private void verifyCurrentSnapshotVersion(String jobId, ActionListener<Boolean> + "please revert to a newer model snapshot or reset the job", jobId, jobSnapshotId, - MIN_REPORTED_SUPPORTED_SNAPSHOT_VERSION.toString() + MIN_SUPPORTED_SNAPSHOT_VERSION.toString() ) ); }, snapshotFailure -> { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearning.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearning.java index bab012afc3101..ff1a1d19779df 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearning.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearning.java @@ -27,6 +27,7 @@ import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; import org.elasticsearch.xpack.core.rollup.action.GetRollupIndexCapsAction; import org.elasticsearch.xpack.core.ssl.SSLService; +import org.elasticsearch.xpack.inference.InferencePlugin; import org.elasticsearch.xpack.monitoring.Monitoring; import org.elasticsearch.xpack.security.Security; @@ -86,6 +87,12 @@ protected XPackLicenseState getLicenseState() { } }); plugins.add(new MockedRollupPlugin()); + plugins.add(new InferencePlugin(settings) { + @Override + protected SSLService getSslService() { + return thisVar.getSslService(); + } + }); } @Override diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/support/BaseMlIntegTestCase.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/support/BaseMlIntegTestCase.java index aeebfabdce704..5cf15454e47f2 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/support/BaseMlIntegTestCase.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/support/BaseMlIntegTestCase.java @@ -82,7 +82,6 @@ import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts; import org.elasticsearch.xpack.core.ml.utils.MlTaskState; import org.elasticsearch.xpack.ilm.IndexLifecycle; -import org.elasticsearch.xpack.inference.InferencePlugin; import
org.elasticsearch.xpack.ml.LocalStateMachineLearning; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.MlSingleNodeTestCase; @@ -161,8 +160,7 @@ protected Collection<Class<? extends Plugin>> nodePlugins() { DataStreamsPlugin.class, // To remove errors from parsing build in templates that contain scaled_float MapperExtrasPlugin.class, - Wildcard.class, - InferencePlugin.class + Wildcard.class ); } diff --git a/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/AbstractArchiveTestCase.java b/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/AbstractArchiveTestCase.java index 71f788727aa23..a24c673c1aef8 100644 --- a/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/AbstractArchiveTestCase.java +++ b/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/AbstractArchiveTestCase.java @@ -94,11 +94,7 @@ public IndexMetadata getSnapshotIndexMetaData(RepositoryData repositoryData, Sna .put( IndexMetadata.SETTING_INDEX_VERSION_CREATED.getKey(), metadata.settings() - .getAsVersionId( - "version", - IndexVersion::fromId, - IndexVersion.fromId(randomFrom(5000099, 6000099, 7000099)) - ) + .getAsVersionId("version", IndexVersion::fromId, IndexVersion.fromId(randomFrom(5000099, 6000099))) ) ) .build(); diff --git a/x-pack/plugin/rank-rrf/src/test/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderTests.java b/x-pack/plugin/rank-rrf/src/test/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderTests.java index d20f0f88aeb16..bdd6d73ec0fbf 100644 --- a/x-pack/plugin/rank-rrf/src/test/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderTests.java +++ b/x-pack/plugin/rank-rrf/src/test/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderTests.java @@ -54,7 +54,9 @@ public void testRetrieverExtractionErrors() throws IOException { IllegalArgumentException iae = expectThrows( IllegalArgumentException.class, () -> ssb.parseXContent(parser, true, nf -> true) - .rewrite(new QueryRewriteContext(parserConfig(), null, null, null, new PointInTimeBuilder(new BytesArray("pitid")))) + .rewrite( + new QueryRewriteContext(parserConfig(), null, null, null, new PointInTimeBuilder(new BytesArray("pitid")), null) + ) ); assertEquals("[search_after] cannot be used in children of compound retrievers", iae.getMessage()); } @@ -70,7 +72,9 @@ public void testRetrieverExtractionErrors() throws IOException { IllegalArgumentException iae = expectThrows( IllegalArgumentException.class, () -> ssb.parseXContent(parser, true, nf -> true) - .rewrite(new QueryRewriteContext(parserConfig(), null, null, null, new PointInTimeBuilder(new BytesArray("pitid")))) + .rewrite( + new QueryRewriteContext(parserConfig(), null, null, null, new PointInTimeBuilder(new BytesArray("pitid")), null) + ) ); assertEquals("[terminate_after] cannot be used in children of compound retrievers", iae.getMessage()); } diff --git a/x-pack/plugin/security/qa/consistency-checks/src/test/java/org/elasticsearch/xpack/security/CrossClusterShardTests.java b/x-pack/plugin/security/qa/consistency-checks/src/test/java/org/elasticsearch/xpack/security/CrossClusterShardTests.java index ab5be0f48f5f3..057ebdece5c61 100644 --- a/x-pack/plugin/security/qa/consistency-checks/src/test/java/org/elasticsearch/xpack/security/CrossClusterShardTests.java +++
b/x-pack/plugin/security/qa/consistency-checks/src/test/java/org/elasticsearch/xpack/security/CrossClusterShardTests.java @@ -35,7 +35,6 @@ import org.elasticsearch.xpack.frozen.FrozenIndices; import org.elasticsearch.xpack.graph.Graph; import org.elasticsearch.xpack.ilm.IndexLifecycle; -import org.elasticsearch.xpack.inference.InferencePlugin; import org.elasticsearch.xpack.profiling.ProfilingPlugin; import org.elasticsearch.xpack.rollup.Rollup; import org.elasticsearch.xpack.search.AsyncSearch; @@ -89,7 +88,6 @@ protected Collection<Class<? extends Plugin>> getPlugins() { FrozenIndices.class, Graph.class, IndexLifecycle.class, - InferencePlugin.class, IngestCommonPlugin.class, IngestTestPlugin.class, MustachePlugin.class, diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java index 09449f81121fd..d6bad85161fd9 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.remotecluster; +import org.apache.http.client.methods.HttpGet; import org.elasticsearch.Build; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; @@ -22,6 +23,7 @@ import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.util.resource.Resource; import org.elasticsearch.test.junit.RunnableTestRuleAdapter; +import org.elasticsearch.test.rest.ObjectPath; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.json.JsonXContent; import org.junit.After; @@ -34,6 +36,7 @@ import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Base64; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; @@ -51,6 +54,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.not; public class RemoteClusterSecurityEsqlIT extends AbstractRemoteClusterSecurityTestCase { private static final AtomicReference<Map<String, Object>> API_KEY_MAP_REF = new AtomicReference<>(); @@ -342,6 +346,14 @@ public void testCrossClusterQuery() throws Exception { configureRemoteCluster(); populateData(); + Map<String, Object> esqlCcsLicenseFeatureUsage = fetchEsqlCcsFeatureUsageFromNode(client()); + + Object ccsLastUsedTimestampAtStartOfTest = null; + if (esqlCcsLicenseFeatureUsage.isEmpty() == false) { + // some test runs will have a usage value already, so capture that to compare at end of test + ccsLastUsedTimestampAtStartOfTest = esqlCcsLicenseFeatureUsage.get("last_used"); + } + // query remote cluster only Request request = esqlRequest(""" FROM my_remote_cluster:employees @@ -385,6 +397,15 @@ public void testCrossClusterQuery() throws Exception { | LIMIT 2 | KEEP emp_id, department""")); assertRemoteOnlyAgainst2IndexResults(response); + + // check that the esql-ccs license feature is now present and that the last_used field has been updated + esqlCcsLicenseFeatureUsage = fetchEsqlCcsFeatureUsageFromNode(client()); + assertThat(esqlCcsLicenseFeatureUsage.size(), equalTo(5)); + Object lastUsed =
esqlCcsLicenseFeatureUsage.get("last_used"); + assertNotNull("lastUsed should not be null", lastUsed); + if (ccsLastUsedTimestampAtStartOfTest != null) { + assertThat(lastUsed.toString(), not(equalTo(ccsLastUsedTimestampAtStartOfTest.toString()))); + } } @SuppressWarnings("unchecked") @@ -1660,4 +1681,18 @@ void assertExpectedClustersForMissingIndicesTests(Map<String, Object> responseMa assertThat((int) shards.get("failed"), is(0)); } } + + private static Map<String, Object> fetchEsqlCcsFeatureUsageFromNode(RestClient client) throws IOException { + Request request = new Request(HttpGet.METHOD_NAME, "_license/feature_usage"); + request.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", basicAuthHeaderValue(USER, PASS))); + Response response = client.performRequest(request); + ObjectPath path = ObjectPath.createFromResponse(response); + List<Map<String, Object>> features = path.evaluate("features"); + for (var feature : features) { + if ("esql-ccs".equals(feature.get("name"))) { + return feature; + } + } + return Collections.emptyMap(); + } } diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index db87fdbcb8f1f..b139d1526ec20 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -639,6 +639,7 @@ public class Constants { "internal:index/metadata/migration_version/update", new FeatureFlag("reindex_data_stream").isEnabled() ? "indices:admin/migration/reindex_status" : null, new FeatureFlag("reindex_data_stream").isEnabled() ? "indices:admin/data_stream/reindex" : null, + new FeatureFlag("reindex_data_stream").isEnabled() ? "indices:admin/data_stream/reindex_cancel" : null, "internal:admin/repository/verify", "internal:admin/repository/verify/coordinate" ).filter(Objects::nonNull).collect(Collectors.toUnmodifiableSet()); diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml index 81f65668722fc..2a4cde9a680e9 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml @@ -92,7 +92,7 @@ setup: - gt: {esql.functions.to_long: $functions_to_long} - match: {esql.functions.coalesce: $functions_coalesce} # Testing for the entire function set isn't feasible, so we just check that we return the correct count as an approximation.
- - length: {esql.functions: 128} # check the "sister" test below for a likely update to the same esql.functions length check + - length: {esql.functions: 129} # check the "sister" test below for a likely update to the same esql.functions length check --- "Basic ESQL usage output (telemetry) non-snapshot version": @@ -163,4 +163,4 @@ setup: - match: {esql.functions.cos: $functions_cos} - gt: {esql.functions.to_long: $functions_to_long} - match: {esql.functions.coalesce: $functions_coalesce} - - length: {esql.functions: 124} # check the "sister" test above for a likely update to the same esql.functions length check + - length: {esql.functions: 125} # check the "sister" test above for a likely update to the same esql.functions length check diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/migrate/10_reindex.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/migrate/10_reindex.yml index f50a7a65f53d3..9fb33b43f042f 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/migrate/10_reindex.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/migrate/10_reindex.yml @@ -104,14 +104,23 @@ setup: name: my-data-stream - is_true: acknowledged -# Uncomment once the cancel API is in place -# - do: -# migrate.reindex: -# body: | -# { -# "mode": "upgrade", -# "source": { -# "index": "my-data-stream" -# } -# } -# - match: { task: "reindex-data-stream-my-data-stream" } + - do: + migrate.reindex: + body: | + { + "mode": "upgrade", + "source": { + "index": "my-data-stream" + } + } + - match: { acknowledged: true } + + - do: + migrate.cancel_reindex: + index: "my-data-stream" + - match: { acknowledged: true } + + - do: + catch: /resource_not_found_exception/ + migrate.cancel_reindex: + index: "my-data-stream" diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/migrate/20_reindex_status.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/migrate/20_reindex_status.yml index ae343a0b4db95..c94ce8dd211ae 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/migrate/20_reindex_status.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/migrate/20_reindex_status.yml @@ -46,25 +46,39 @@ setup: name: my-data-stream - is_true: acknowledged -# Uncomment once the cancel API is in place -# - do: -# migrate.reindex: -# body: | -# { -# "mode": "upgrade", -# "source": { -# "index": "my-data-stream" -# } -# } -# - match: { acknowledged: true } -# -# - do: -# migrate.get_reindex_status: -# index: "my-data-stream" -# - match: { complete: true } -# - match: { total_indices: 1 } -# - match: { total_indices_requiring_upgrade: 0 } -# - match: { successes: 0 } -# - match: { in_progress: 0 } -# - match: { pending: 0 } -# - match: { errors: [] } + - do: + migrate.reindex: + body: | + { + "mode": "upgrade", + "source": { + "index": "my-data-stream" + } + } + - match: { acknowledged: true } + + - do: + migrate.get_reindex_status: + index: "my-data-stream" + - match: { complete: true } + - match: { total_indices: 1 } + - match: { total_indices_requiring_upgrade: 0 } + - match: { successes: 0 } + - match: { in_progress: 0 } + - match: { pending: 0 } + - match: { errors: [] } + + - do: + migrate.cancel_reindex: + index: "my-data-stream" + - match: { acknowledged: true } + + - do: + catch: /resource_not_found_exception/ + migrate.cancel_reindex: + index: "my-data-stream" + + - do: + catch: /resource_not_found_exception/ + migrate.get_reindex_status: + index: "my-data-stream" diff --git 
a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformFailureHandler.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformFailureHandler.java index 337d3c5820c07..24586e5f36337 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformFailureHandler.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformFailureHandler.java @@ -169,7 +169,14 @@ private void handleScriptException(ScriptException scriptException, boolean unat * @param numFailureRetries the number of configured retries */ private void handleBulkIndexingException(BulkIndexingException bulkIndexingException, boolean unattended, int numFailureRetries) { - if (unattended == false && bulkIndexingException.isIrrecoverable()) { + if (bulkIndexingException.getCause() instanceof ClusterBlockException) { + retryWithoutIncrementingFailureCount( + bulkIndexingException, + bulkIndexingException.getDetailedMessage(), + unattended, + numFailureRetries + ); + } else if (unattended == false && bulkIndexingException.isIrrecoverable()) { String message = TransformMessages.getMessage( TransformMessages.LOG_TRANSFORM_PIVOT_IRRECOVERABLE_BULK_INDEXING_ERROR, bulkIndexingException.getDetailedMessage() @@ -232,12 +239,46 @@ private void retry(Throwable unwrappedException, String message, boolean unatten && unwrappedException.getClass().equals(context.getLastFailure().getClass()); final int failureCount = context.incrementAndGetFailureCount(unwrappedException); - if (unattended == false && numFailureRetries != -1 && failureCount > numFailureRetries) { fail(unwrappedException, "task encountered more than " + numFailureRetries + " failures; latest failure: " + message); return; } + logRetry(unwrappedException, message, unattended, numFailureRetries, failureCount, repeatedFailure); + }
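The new branch above treats a BulkIndexingException caused by a ClusterBlockException as a transient condition. A condensed, standalone sketch of the resulting decision order; the scheduleRetry and fail stubs are illustrative stand-ins for this class's real retry(...) and fail(...) plumbing, not code from the diff:

import org.elasticsearch.cluster.block.ClusterBlockException;

// Condensed, standalone sketch of the decision order implemented above; the
// scheduleRetry/fail stubs stand in for this class's real retry(...)/fail(...).
class BulkFailureDecisionSketch {
    void onBulkIndexingFailure(Exception e, boolean irrecoverable, boolean unattended) {
        if (e.getCause() instanceof ClusterBlockException) {
            // Recoverable, e.g. the destination index is temporarily closed:
            // retry without consuming one of the configured retries.
            scheduleRetry(e, false);
        } else if (unattended == false && irrecoverable) {
            // Permanent failure outside unattended mode: stop the transform.
            fail(e);
        } else {
            // Otherwise retry, counting the failure against the retry budget.
            scheduleRetry(e, true);
        }
    }

    void scheduleRetry(Exception e, boolean incrementFailureCount) { /* stub */ }

    void fail(Exception e) { /* stub */ }
}

+ + /** + * Terminate failure handling without incrementing the retries used + * <p>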
+ * This is used when there is an ongoing recoverable issue and we want to retain + * retries for any issues that may occur after the issue is resolved + * + * @param unwrappedException The exception caught + * @param message error message to log/audit + * @param unattended whether the transform runs in unattended mode + * @param numFailureRetries the number of configured retries + */ + private void retryWithoutIncrementingFailureCount( + Throwable unwrappedException, + String message, + boolean unattended, + int numFailureRetries + ) { + // group failures to decide whether to report it below + final boolean repeatedFailure = context.getLastFailure() != null + && unwrappedException.getClass().equals(context.getLastFailure().getClass()); + + logRetry(unwrappedException, message, unattended, numFailureRetries, context.getFailureCount(), repeatedFailure); + } + + private void logRetry( + Throwable unwrappedException, + String message, + boolean unattended, + int numFailureRetries, + int failureCount, + boolean repeatedFailure + ) { // Since our schedule fires again very quickly after failures it is possible to run into the same failure numerous // times in a row, very quickly. We do not want to spam the audit log with repeated failures, so only record the first one // and if the number of retries is about to exceed diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformFailureHandlerTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformFailureHandlerTests.java index 84c8d4e140408..3894ff3043ccd 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformFailureHandlerTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformFailureHandlerTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.xpack.core.transform.transforms.TransformTaskState; import org.elasticsearch.xpack.transform.notifications.MockTransformAuditor; +import java.util.List; import java.util.Map; import java.util.Set; @@ -63,9 +64,121 @@ public int getFailureCountChangedCounter() { } } - public void testUnattended() { + public void testHandleIndexerFailure_CircuitBreakingExceptionNewPageSizeLessThanMinimumPageSize() { + var e = new CircuitBreakingException(randomAlphaOfLength(10), 1, 0, randomFrom(CircuitBreaker.Durability.values())); + assertRetryIfUnattendedOtherwiseFail(e); + } + + public void testHandleIndexerFailure_CircuitBreakingExceptionNewPageSizeNotLessThanMinimumPageSize() { + var e = new CircuitBreakingException(randomAlphaOfLength(10), 1, 1, randomFrom(CircuitBreaker.Durability.values())); + + List.of(true, false).forEach((unattended) -> { assertNoFailureAndContextPageSizeSet(e, unattended, 365); }); + } + + public void testHandleIndexerFailure_ScriptException() { + var e = new ScriptException( + randomAlphaOfLength(10), + new ArithmeticException(randomAlphaOfLength(10)), + singletonList(randomAlphaOfLength(10)), + randomAlphaOfLength(10), + randomAlphaOfLength(10) + ); + assertRetryIfUnattendedOtherwiseFail(e); + } + + public void testHandleIndexerFailure_BulkIndexExceptionWrappingClusterBlockException() { + final BulkIndexingException bulkIndexingException = new BulkIndexingException( + randomAlphaOfLength(10), + new ClusterBlockException(Map.of("test-index", Set.of(MetadataIndexStateService.INDEX_CLOSED_BLOCK))), + randomBoolean() + ); + + List.of(true, false).forEach((unattended) -> {
assertRetryFailureCountNotIncremented(bulkIndexingException, unattended); }); + } + + public void testHandleIndexerFailure_IrrecoverableBulkIndexException() { + final BulkIndexingException e = new BulkIndexingException( + randomAlphaOfLength(10), + new ElasticsearchStatusException(randomAlphaOfLength(10), RestStatus.INTERNAL_SERVER_ERROR), + true + ); + assertRetryIfUnattendedOtherwiseFail(e); + } + + public void testHandleIndexerFailure_RecoverableBulkIndexException() { + final BulkIndexingException bulkIndexingException = new BulkIndexingException( + randomAlphaOfLength(10), + new ElasticsearchStatusException(randomAlphaOfLength(10), RestStatus.INTERNAL_SERVER_ERROR), + false + ); + + List.of(true, false).forEach((unattended) -> { assertRetry(bulkIndexingException, unattended); }); + } + + public void testHandleIndexerFailure_ClusterBlockException() { + List.of(true, false).forEach((unattended) -> { + assertRetry( + new ClusterBlockException(Map.of(randomAlphaOfLength(10), Set.of(MetadataIndexStateService.INDEX_CLOSED_BLOCK))), + unattended + ); + }); + } + + public void testHandleIndexerFailure_SearchPhaseExecutionExceptionWithNoShardSearchFailures() { + List.of(true, false).forEach((unattended) -> { + assertRetry( + new SearchPhaseExecutionException(randomAlphaOfLength(10), randomAlphaOfLength(10), ShardSearchFailure.EMPTY_ARRAY), + unattended + ); + }); + } + + public void testHandleIndexerFailure_SearchPhaseExecutionExceptionWithShardSearchFailures() { + List.of(true, false).forEach((unattended) -> { + assertRetry( + new SearchPhaseExecutionException( + randomAlphaOfLength(10), + randomAlphaOfLength(10), + new ShardSearchFailure[] { new ShardSearchFailure(new Exception()) } + ), + unattended + ); + }); + } + + public void testHandleIndexerFailure_RecoverableElasticsearchException() { + List.of(true, false).forEach((unattended) -> { + assertRetry(new ElasticsearchStatusException(randomAlphaOfLength(10), RestStatus.INTERNAL_SERVER_ERROR), unattended); + }); + } + + public void testHandleIndexerFailure_IrrecoverableElasticsearchException() { + var e = new ElasticsearchStatusException(randomAlphaOfLength(10), RestStatus.NOT_FOUND); + assertRetryIfUnattendedOtherwiseFail(e); + } + + public void testHandleIndexerFailure_IllegalArgumentException() { + var e = new IllegalArgumentException(randomAlphaOfLength(10)); + assertRetryIfUnattendedOtherwiseFail(e); + } + + public void testHandleIndexerFailure_UnexpectedException() { + List.of(true, false).forEach((unattended) -> { assertRetry(new Exception(), unattended); }); + } + + private void assertRetryIfUnattendedOtherwiseFail(Exception e) { + List.of(true, false).forEach((unattended) -> { + if (unattended) { + assertRetry(e, unattended); + } else { + assertFailure(e); + } + }); + } + + private void assertRetry(Exception e, boolean unattended) { String transformId = randomAlphaOfLength(10); - SettingsConfig settings = new SettingsConfig.Builder().setUnattended(true).build(); + SettingsConfig settings = new SettingsConfig.Builder().setNumFailureRetries(2).setUnattended(unattended).build(); MockTransformAuditor auditor = MockTransformAuditor.createMockAuditor(); MockTransformContextListener contextListener = new MockTransformContextListener(); @@ -74,51 +187,33 @@ public void testUnattended() { TransformFailureHandler handler = new TransformFailureHandler(auditor, context, transformId); - handler.handleIndexerFailure( - new SearchPhaseExecutionException( - "query", - "Partial shards failure", - new ShardSearchFailure[] { - new 
ShardSearchFailure(new CircuitBreakingException("to much memory", 110, 100, CircuitBreaker.Durability.TRANSIENT)) } - ), - settings - ); + assertNoFailure(handler, e, contextListener, settings, true); + assertNoFailure(handler, e, contextListener, settings, true); + if (unattended) { + assertNoFailure(handler, e, contextListener, settings, true); + } else { + // fail after max retry attempts reached + assertFailure(handler, e, contextListener, settings, true); + } + } - // CBE isn't a failure, but it only affects page size(which we don't test here) - assertFalse(contextListener.getFailed()); - assertEquals(0, contextListener.getFailureCountChangedCounter()); + private void assertRetryFailureCountNotIncremented(Exception e, boolean unattended) { + String transformId = randomAlphaOfLength(10); + SettingsConfig settings = new SettingsConfig.Builder().setNumFailureRetries(2).setUnattended(unattended).build(); - assertNoFailure( - handler, - new SearchPhaseExecutionException( - "query", - "Partial shards failure", - new ShardSearchFailure[] { - new ShardSearchFailure( - new ScriptException( - "runtime error", - new ArithmeticException("/ by zero"), - singletonList("stack"), - "test", - "painless" - ) - ) } - ), - contextListener, - settings - ); - assertNoFailure( - handler, - new ElasticsearchStatusException("something really bad happened", RestStatus.INTERNAL_SERVER_ERROR), - contextListener, - settings - ); - assertNoFailure(handler, new IllegalArgumentException("expected apples not oranges"), contextListener, settings); - assertNoFailure(handler, new RuntimeException("the s*** hit the fan"), contextListener, settings); - assertNoFailure(handler, new NullPointerException("NPE"), contextListener, settings); + MockTransformAuditor auditor = MockTransformAuditor.createMockAuditor(); + MockTransformContextListener contextListener = new MockTransformContextListener(); + TransformContext context = new TransformContext(TransformTaskState.STARTED, "", 0, contextListener); + context.setPageSize(500); + + TransformFailureHandler handler = new TransformFailureHandler(auditor, context, transformId); + + assertNoFailure(handler, e, contextListener, settings, false); + assertNoFailure(handler, e, contextListener, settings, false); + assertNoFailure(handler, e, contextListener, settings, false); } - public void testClusterBlock() { + private void assertFailure(Exception e) { String transformId = randomAlphaOfLength(10); SettingsConfig settings = new SettingsConfig.Builder().setNumFailureRetries(2).build(); @@ -129,32 +224,50 @@ public void testClusterBlock() { TransformFailureHandler handler = new TransformFailureHandler(auditor, context, transformId); - final ClusterBlockException clusterBlock = new ClusterBlockException( - Map.of("test-index", Set.of(MetadataIndexStateService.INDEX_CLOSED_BLOCK)) - ); + assertFailure(handler, e, contextListener, settings, false); + } - handler.handleIndexerFailure(clusterBlock, settings); - assertFalse(contextListener.getFailed()); - assertEquals(1, contextListener.getFailureCountChangedCounter()); + private void assertNoFailure( + TransformFailureHandler handler, + Exception e, + MockTransformContextListener mockTransformContextListener, + SettingsConfig settings, + boolean failureCountIncremented + ) { + handler.handleIndexerFailure(e, settings); + assertFalse(mockTransformContextListener.getFailed()); + assertEquals(failureCountIncremented ? 
1 : 0, mockTransformContextListener.getFailureCountChangedCounter()); + mockTransformContextListener.reset(); + } - handler.handleIndexerFailure(clusterBlock, settings); - assertFalse(contextListener.getFailed()); - assertEquals(2, contextListener.getFailureCountChangedCounter()); + private void assertNoFailureAndContextPageSizeSet(Exception e, boolean unattended, int newPageSize) { + String transformId = randomAlphaOfLength(10); + SettingsConfig settings = new SettingsConfig.Builder().setNumFailureRetries(2).setUnattended(unattended).build(); - handler.handleIndexerFailure(clusterBlock, settings); - assertTrue(contextListener.getFailed()); - assertEquals(3, contextListener.getFailureCountChangedCounter()); + MockTransformAuditor auditor = MockTransformAuditor.createMockAuditor(); + MockTransformContextListener contextListener = new MockTransformContextListener(); + TransformContext context = new TransformContext(TransformTaskState.STARTED, "", 0, contextListener); + context.setPageSize(500); + + TransformFailureHandler handler = new TransformFailureHandler(auditor, context, transformId); + + handler.handleIndexerFailure(e, settings); + assertFalse(contextListener.getFailed()); + assertEquals(0, contextListener.getFailureCountChangedCounter()); + assertEquals(newPageSize, context.getPageSize()); + contextListener.reset(); } - private void assertNoFailure( + private void assertFailure( TransformFailureHandler handler, Exception e, MockTransformContextListener mockTransformContextListener, - SettingsConfig settings + SettingsConfig settings, + boolean failureCountChanged ) { handler.handleIndexerFailure(e, settings); - assertFalse(mockTransformContextListener.getFailed()); - assertEquals(1, mockTransformContextListener.getFailureCountChangedCounter()); + assertTrue(mockTransformContextListener.getFailed()); + assertEquals(failureCountChanged ? 
1 : 0, mockTransformContextListener.getFailureCountChangedCounter()); mockTransformContextListener.reset(); } diff --git a/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/src/test/resources/rest-api-spec/test/querying_cluster/80_esql.yml b/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/src/test/resources/rest-api-spec/test/querying_cluster/80_esql.yml index 4c0bbfd7ec139..1b435c551fbe9 100644 --- a/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/src/test/resources/rest-api-spec/test/querying_cluster/80_esql.yml +++ b/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/src/test/resources/rest-api-spec/test/querying_cluster/80_esql.yml @@ -86,11 +86,12 @@ teardown: ignore: 404 --- -"Index data and search on the mixed cluster": +"ES|QL cross-cluster query fails with basic license": - skip: features: allowed_warnings - do: + catch: bad_request allowed_warnings: - "Line 1:21: Square brackets '[]' need to be removed in FROM METADATA declaration" headers: { Authorization: "Basic am9lOnMza3JpdC1wYXNzd29yZA==" } @@ -98,23 +99,11 @@ teardown: body: query: 'FROM *:esql*,esql_* | STATS total = sum(cost) by tag | SORT tag | LIMIT 10' - - match: {columns.0.name: "total"} - - match: {columns.0.type: "long"} - - match: {columns.1.name: "tag"} - - match: {columns.1.type: "keyword"} - - - match: {values.0.0: 2200} - - match: {values.0.1: "computer"} - - match: {values.1.0: 170} - - match: {values.1.1: "headphone"} - - match: {values.2.0: 2100 } - - match: {values.2.1: "laptop" } - - match: {values.3.0: 1000 } - - match: {values.3.1: "monitor" } - - match: {values.4.0: 550 } - - match: {values.4.1: "tablet" } + - match: { error.type: "status_exception" } + - match: { error.reason: "A valid Enterprise license is required to run ES|QL cross-cluster searches. License found: active basic license" } - do: + catch: bad_request allowed_warnings: - "Line 1:21: Square brackets '[]' need to be removed in FROM METADATA declaration" headers: { Authorization: "Basic am9lOnMza3JpdC1wYXNzd29yZA==" } @@ -128,28 +117,11 @@ teardown: lte: "2023-01-03" format: "yyyy-MM-dd" - - match: {columns.0.name: "_index"} - - match: {columns.0.type: "keyword"} - - match: {columns.1.name: "tag"} - - match: {columns.1.type: "keyword"} - - match: {columns.2.name: "cost" } - - match: {columns.2.type: "long" } - - - match: {values.0.0: "esql_local"} - - match: {values.0.1: "monitor"} - - match: {values.0.2: 250 } - - match: {values.1.0: "my_remote_cluster:esql_index" } - - match: {values.1.1: "tablet"} - - match: {values.1.2: 450 } - - match: {values.2.0: "my_remote_cluster:esql_index" } - - match: {values.2.1: "computer" } - - match: {values.2.2: 1200 } - - match: {values.3.0: "esql_local"} - - match: {values.3.1: "laptop" } - - match: {values.3.2: 2100 } + - match: { error.type: "status_exception" } + - match: { error.reason: "A valid Enterprise license is required to run ES|QL cross-cluster searches. 
License found: active basic license" } --- -"Enrich across clusters": +"ES|QL enrich query across clusters fails with basic license": - requires: cluster_features: ["gte_v8.13.0"] reason: "Enrich across clusters available in 8.13 or later" @@ -194,27 +166,14 @@ teardown: index: suggestions - do: + catch: bad_request headers: { Authorization: "Basic am9lOnMza3JpdC1wYXNzd29yZA==" } esql.query: body: query: 'FROM *:esql*,esql_* | STATS total = sum(cost) by tag | SORT total DESC | LIMIT 3 | ENRICH suggestions | KEEP tag, total, phrase' - - match: {columns.0.name: "tag"} - - match: {columns.0.type: "keyword"} - - match: {columns.1.name: "total" } - - match: {columns.1.type: "long" } - - match: {columns.2.name: "phrase" } - - match: {columns.2.type: "keyword" } - - - match: {values.0.0: "computer"} - - match: {values.0.1: 2200} - - match: {values.0.2: "best desktop for programming"} - - match: {values.1.0: "laptop"} - - match: {values.1.1: 2100 } - - match: {values.1.2: "the best battery life laptop"} - - match: {values.2.0: "monitor" } - - match: {values.2.1: 1000 } - - match: {values.2.2: "4k or 5k or 6K monitor?" } + - match: { error.type: "status_exception" } + - match: { error.reason: "A valid Enterprise license is required to run ES|QL cross-cluster searches. License found: active basic license" } - do: enrich.delete_policy: