diff --git a/.buildkite/scripts/gradle-configuration-cache-validation.sh b/.buildkite/scripts/gradle-configuration-cache-validation.sh
index 8249155c5ffc..55a4b18a1e88 100755
--- a/.buildkite/scripts/gradle-configuration-cache-validation.sh
+++ b/.buildkite/scripts/gradle-configuration-cache-validation.sh
@@ -2,18 +2,17 @@
 set -euo pipefail

-# TODO/ FIXIT without a full resolved gradle home, we see issues configuration cache reuse
-./gradlew --max-workers=8 --parallel --scan --no-daemon precommit
+# This is a workaround for https://github.com/gradle/gradle/issues/28159
+.ci/scripts/run-gradle.sh --no-daemon precommit

-./gradlew --max-workers=8 --parallel --scan --configuration-cache precommit -Dorg.gradle.configuration-cache.inputs.unsafe.ignore.file-system-checks=build/*.tar.bz2
+.ci/scripts/run-gradle.sh --configuration-cache precommit -Dorg.gradle.configuration-cache.inputs.unsafe.ignore.file-system-checks=build/*.tar.bz2

 # Create a temporary file
 tmpOutputFile=$(mktemp)
 trap "rm $tmpOutputFile" EXIT

 echo "2nd run"
-# TODO run-gradle.sh script causes issues because of init script handling
-./gradlew --max-workers=8 --parallel --scan --configuration-cache precommit -Dorg.gradle.configuration-cache.inputs.unsafe.ignore.file-system-checks=build/*.tar.bz2 | tee $tmpOutputFile
+.ci/scripts/run-gradle.sh --configuration-cache precommit -Dorg.gradle.configuration-cache.inputs.unsafe.ignore.file-system-checks=build/*.tar.bz2 | tee $tmpOutputFile

 # Check if the command was successful
 if grep -q "Configuration cache entry reused." $tmpOutputFile; then
diff --git a/TESTING.asciidoc b/TESTING.asciidoc
index 3a066be2f8ea..6f2dc3c64feb 100644
--- a/TESTING.asciidoc
+++ b/TESTING.asciidoc
@@ -472,7 +472,7 @@ You can run a group of YAML tests by using wildcards:
 --tests "org.elasticsearch.test.rest.ClientYamlTestSuiteIT.test {yaml=index/*/*}"
 ---------------------------------------------------------------------------
-or 
+or
 ---------------------------------------------------------------------------
 ./gradlew :rest-api-spec:yamlRestTest \
@@ -564,8 +564,8 @@ Sometimes a backward compatibility change spans two versions. A common case is
 a new functionality that needs a BWC bridge in an unreleased version of a release branch (for example, 5.x).
 Another use case, since the introduction of serverless, is to test BWC against main in addition to the other released branches.
 To do so, specify the `bwc.refspec` remote and branch to use for the BWC build as `origin/main`.
-To test against main, you will also need to create a new version in link:./server/src/main/java/org/elasticsearch/Version.java[Version.java], 
-increment `elasticsearch` in link:./build-tools-internal/version.properties[version.properties], and hard-code the `project.version` for ml-cpp 
+To test against main, you will also need to create a new version in link:./server/src/main/java/org/elasticsearch/Version.java[Version.java],
+increment `elasticsearch` in link:./build-tools-internal/version.properties[version.properties], and hard-code the `project.version` for ml-cpp
 in link:./x-pack/plugin/ml/build.gradle[ml/build.gradle]. In general, to test the changes, you can instruct Gradle to build the BWC version
 from another remote/branch combination instead of pulling the release branch from GitHub.
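A hypothetical invocation of the above (the property key `bwc.refspec.main` is illustrative, following the `bwc.refspec` convention the paragraph describes; remote and branch are placeholders):

---------------------------------------------------------------------------
./gradlew check -Dbwc.refspec.main=origin/main
---------------------------------------------------------------------------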
@@ -625,7 +625,7 @@ For specific YAML rest tests one can use For disabling entire types of tests for subprojects, one can use for example: ------------------------------------------------ -if (BuildParams.inFipsJvm){ +if (buildParams.inFipsJvm) { // This test cluster is using a BASIC license and FIPS 140 mode is not supported in BASIC tasks.named("javaRestTest").configure{enabled = false } } diff --git a/benchmarks/build.gradle b/benchmarks/build.gradle index f3ced9f16d32..25cfae6c9803 100644 --- a/benchmarks/build.gradle +++ b/benchmarks/build.gradle @@ -1,4 +1,3 @@ -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.TestUtil /* @@ -78,7 +77,7 @@ tasks.register("copyPainless", Copy) { } tasks.named("run").configure { - executable = "${BuildParams.runtimeJavaHome}/bin/java" + executable = "${buildParams.runtimeJavaHome.get()}/bin/java" args << "-Dplugins.dir=${buildDir}/plugins" << "-Dtests.index=${buildDir}/index" dependsOn "copyExpression", "copyPainless", configurations.nativeLib systemProperty 'es.nativelibs.path', TestUtil.getTestLibraryPath(file("../libs/native/libraries/build/platform/").toString()) diff --git a/build-conventions/build.gradle b/build-conventions/build.gradle index d8c211c0f02f..b0eda5a34065 100644 --- a/build-conventions/build.gradle +++ b/build-conventions/build.gradle @@ -12,9 +12,6 @@ import org.gradle.plugins.ide.eclipse.model.SourceFolder buildscript { repositories { - maven { - url 'https://jitpack.io' - } mavenCentral() } } @@ -70,10 +67,6 @@ gradlePlugin { } repositories { - maven { - url 'https://jitpack.io' - } - mavenCentral() gradlePluginPortal() } diff --git a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/GUtils.java b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/GUtils.java index 9a35aa41ba1e..0b04496866ca 100644 --- a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/GUtils.java +++ b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/GUtils.java @@ -16,4 +16,12 @@ public abstract class GUtils { public static String capitalize(String s) { return s.substring(0, 1).toUpperCase(Locale.ROOT) + s.substring(1); } + + public static T elvis(T given, T fallback) { + if (given == null) { + return fallback; + } else { + return given; + } + } } diff --git a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/PublishPlugin.java b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/PublishPlugin.java index cd13743ee074..c3124812e508 100644 --- a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/PublishPlugin.java +++ b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/PublishPlugin.java @@ -9,12 +9,14 @@ package org.elasticsearch.gradle.internal.conventions; -import org.elasticsearch.gradle.internal.conventions.precommit.PomValidationPrecommitPlugin; +import groovy.util.Node; + import com.github.jengelman.gradle.plugins.shadow.ShadowExtension; import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin; -import groovy.util.Node; -import org.elasticsearch.gradle.internal.conventions.util.Util; + import org.elasticsearch.gradle.internal.conventions.info.GitInfo; +import org.elasticsearch.gradle.internal.conventions.precommit.PomValidationPrecommitPlugin; +import org.elasticsearch.gradle.internal.conventions.util.Util; import org.gradle.api.NamedDomainObjectSet; import 
org.gradle.api.Plugin; import org.gradle.api.Project; @@ -35,11 +37,12 @@ import org.gradle.api.tasks.bundling.Jar; import org.gradle.initialization.layout.BuildLayout; import org.gradle.language.base.plugins.LifecycleBasePlugin; +import org.w3c.dom.Element; -import javax.inject.Inject; import java.io.File; import java.util.Map; import java.util.concurrent.Callable; +import javax.inject.Inject; public class PublishPlugin implements Plugin { @@ -64,6 +67,7 @@ public void apply(Project project) { configureSourcesJar(project); configurePomGeneration(project); configurePublications(project); + formatGeneratedPom(project); } private void configurePublications(Project project) { @@ -113,29 +117,32 @@ private void configurePomGeneration(Project project) { var archivesBaseName = providerFactory.provider(() -> getArchivesBaseName(extensions)); var projectVersion = providerFactory.provider(() -> project.getVersion()); var generateMavenPoms = project.getTasks().withType(GenerateMavenPom.class); - generateMavenPoms.configureEach( - pomTask -> pomTask.setDestination( + generateMavenPoms.configureEach(pomTask -> { + pomTask.setDestination( (Callable) () -> String.format( "%s/distributions/%s-%s.pom", projectLayout.getBuildDirectory().get().getAsFile().getPath(), archivesBaseName.get(), projectVersion.get() ) - ) - ); + ); + }); + var publishing = extensions.getByType(PublishingExtension.class); final var mavenPublications = publishing.getPublications().withType(MavenPublication.class); - addNameAndDescriptiontoPom(project, mavenPublications); + addNameAndDescriptionToPom(project, mavenPublications); mavenPublications.configureEach(publication -> { - // Add git origin info to generated POM files for internal builds - publication.getPom().withXml(xml -> addScmInfo(xml, gitInfo.get())); + publication.getPom().withXml(xml -> { + // Add git origin info to generated POM files for internal builds + addScmInfo(xml, gitInfo.get()); + }); // have to defer this until archivesBaseName is set project.afterEvaluate(p -> publication.setArtifactId(archivesBaseName.get())); generatePomTask.configure(t -> t.dependsOn(generateMavenPoms)); }); } - private void addNameAndDescriptiontoPom(Project project, NamedDomainObjectSet mavenPublications) { + private void addNameAndDescriptionToPom(Project project, NamedDomainObjectSet mavenPublications) { var name = project.getName(); var description = providerFactory.provider(() -> project.getDescription() != null ? project.getDescription() : ""); mavenPublications.configureEach(p -> p.getPom().withXml(xml -> { @@ -186,4 +193,32 @@ static void configureSourcesJar(Project project) { project.getTasks().named(BasePlugin.ASSEMBLE_TASK_NAME).configure(t -> t.dependsOn(sourcesJarTask)); }); } + + + /** + * Format the generated pom files to be in a sort of reproducible order. + */ + private void formatGeneratedPom(Project project) { + var publishing = project.getExtensions().getByType(PublishingExtension.class); + final var mavenPublications = publishing.getPublications().withType(MavenPublication.class); + mavenPublications.configureEach(publication -> { + publication.getPom().withXml(xml -> { + // Add some pom formatting + formatDependencies(xml); + }); + }); + } + + /** + * just ensure we put dependencies to the end. 
more a cosmetic thing than anything else + * */ + private void formatDependencies(XmlProvider xml) { + Element rootElement = xml.asElement(); + var dependencies = rootElement.getElementsByTagName("dependencies"); + if (dependencies.getLength() == 1 && dependencies.item(0) != null) { + org.w3c.dom.Node item = dependencies.item(0); + rootElement.removeChild(item); + rootElement.appendChild(item); + } + } } diff --git a/build-tools-internal/build.gradle b/build-tools-internal/build.gradle index 38d3c0cd326f..f2a02645f8c0 100644 --- a/build-tools-internal/build.gradle +++ b/build-tools-internal/build.gradle @@ -258,9 +258,6 @@ tasks.named('licenseHeaders').configure { *****************************************************************************/ repositories { - maven { - url 'https://jitpack.io' - } mavenCentral() gradlePluginPortal() } @@ -386,10 +383,13 @@ tasks.named("jar") { spotless { java { - // IDEs can sometimes run annotation processors that leave files in - // here, causing Spotless to complain. Even though this path ought not - // to exist, exclude it anyway in order to avoid spurious failures. - toggleOffOn() + + // workaround for https://github.com/diffplug/spotless/issues/2317 + //toggleOffOn() + target project.fileTree("src/main/java") { + include '**/*.java' + exclude '**/DockerBase.java' + } } } diff --git a/build-tools-internal/settings.gradle b/build-tools-internal/settings.gradle index 1b4fb1215a59..8c88d3604676 100644 --- a/build-tools-internal/settings.gradle +++ b/build-tools-internal/settings.gradle @@ -1,8 +1,5 @@ pluginManagement { repositories { - maven { - url 'https://jitpack.io' - } mavenCentral() gradlePluginPortal() } diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/ElasticsearchJavadocPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/ElasticsearchJavadocPluginFuncTest.groovy index b853fdef6a13..34fa73ce502a 100644 --- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/ElasticsearchJavadocPluginFuncTest.groovy +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/ElasticsearchJavadocPluginFuncTest.groovy @@ -73,7 +73,7 @@ class ElasticsearchJavadocPluginFuncTest extends AbstractGradleFuncTest { buildFile << """ plugins { id 'elasticsearch.java-doc' - id 'com.github.johnrengelman.shadow' version '7.1.2' + id 'com.gradleup.shadow' id 'java' } group = 'org.acme.depending' diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPluginFuncTest.groovy index 87f4bbee0578..6d080e1c8076 100644 --- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPluginFuncTest.groovy +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPluginFuncTest.groovy @@ -9,16 +9,10 @@ package org.elasticsearch.gradle.internal -import org.elasticsearch.gradle.Architecture import org.elasticsearch.gradle.fixtures.AbstractGitAwareGradleFuncTest import org.gradle.testkit.runner.TaskOutcome -import spock.lang.IgnoreIf import spock.lang.Unroll -/* - * Test is ignored on ARM since this test case tests the ability to build certain older BWC branches that we don't support on ARM - */ -@IgnoreIf({ Architecture.current() == Architecture.AARCH64 }) class 
InternalDistributionBwcSetupPluginFuncTest extends AbstractGitAwareGradleFuncTest { def setup() { diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/PublishPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/PublishPluginFuncTest.groovy index 6e403c85a23f..c7e11ba96c7d 100644 --- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/PublishPluginFuncTest.groovy +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/PublishPluginFuncTest.groovy @@ -96,7 +96,7 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest { plugins { id 'elasticsearch.java' id 'elasticsearch.publish' - id 'com.github.johnrengelman.shadow' + id 'com.gradleup.shadow' } repositories { @@ -117,7 +117,7 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest { } version = "1.0" group = 'org.acme' - description = 'some description' + description = 'shadowed project' """ when: @@ -137,7 +137,7 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest { hello-world 1.0 hello-world - some description + shadowed project unknown unknown @@ -186,7 +186,7 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest { plugins { id 'elasticsearch.java' id 'elasticsearch.publish' - id 'com.github.johnrengelman.shadow' + id 'com.gradleup.shadow' } dependencies { @@ -206,7 +206,7 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest { group = 'org.acme' } - description = 'some description' + description = 'with shadowed dependencies' """ when: @@ -226,7 +226,7 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest { hello-world 1.0 hello-world - some description + with shadowed dependencies unknown unknown @@ -277,13 +277,13 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest { plugins { id 'elasticsearch.internal-es-plugin' id 'elasticsearch.publish' - id 'com.github.johnrengelman.shadow' + id 'com.gradleup.shadow' } esplugin { name = 'hello-world-plugin' classname 'org.acme.HelloWorldPlugin' - description = "custom project description" + description = "shadowed es plugin" } publishing { @@ -324,7 +324,7 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest { hello-world-plugin 1.0 hello-world - custom project description + shadowed es plugin unknown unknown @@ -353,7 +353,6 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest { https://www.elastic.co - """ ) } @@ -440,8 +439,7 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest { // scm info only added for internal builds internalBuild() buildFile << """ - BuildParams.init { it.setGitOrigin("https://some-repo.com/repo.git") } - + buildParams.getGitOriginProperty().set("https://some-repo.com/repo.git") apply plugin:'elasticsearch.java' apply plugin:'elasticsearch.publish' diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/snyk/SnykDependencyMonitoringGradlePluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/snyk/SnykDependencyMonitoringGradlePluginFuncTest.groovy index 354100a9b82c..725f117d17e6 100644 --- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/snyk/SnykDependencyMonitoringGradlePluginFuncTest.groovy +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/snyk/SnykDependencyMonitoringGradlePluginFuncTest.groovy @@ -161,7 +161,7 @@ class SnykDependencyMonitoringGradlePluginFuncTest extends AbstractGradleInterna }, "target": { 
"remoteUrl": "http://acme.org", - "branch": "unknown" + "branch": "$version" }, "targetReference": "$version", "projectAttributes": { diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/test/rest/LegacyYamlRestTestPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/test/rest/LegacyYamlRestTestPluginFuncTest.groovy index 97f03d982111..ce5c1519fe11 100644 --- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/test/rest/LegacyYamlRestTestPluginFuncTest.groovy +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/test/rest/LegacyYamlRestTestPluginFuncTest.groovy @@ -10,6 +10,7 @@ package org.elasticsearch.gradle.internal.test.rest import spock.lang.IgnoreIf +import spock.lang.IgnoreRest import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.fixtures.AbstractRestResourcesFuncTest @@ -20,16 +21,16 @@ import org.gradle.testkit.runner.TaskOutcome class LegacyYamlRestTestPluginFuncTest extends AbstractRestResourcesFuncTest { def setup() { + configurationCacheCompatible = true buildApiRestrictionsDisabled = true } def "yamlRestTest does nothing when there are no tests"() { given: + internalBuild() buildFile << """ - plugins { - id 'elasticsearch.legacy-yaml-rest-test' - } + apply plugin: 'elasticsearch.legacy-yaml-rest-test' """ when: @@ -136,7 +137,7 @@ class LegacyYamlRestTestPluginFuncTest extends AbstractRestResourcesFuncTest { """ when: - def result = gradleRunner("yamlRestTest", "--console", 'plain', '--stacktrace').buildAndFail() + def result = gradleRunner("yamlRestTest", "--console", 'plain').buildAndFail() then: result.task(":distribution:archives:integ-test-zip:buildExpanded").outcome == TaskOutcome.SUCCESS diff --git a/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle b/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle index 592e6af41ab0..847eda7a355c 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle @@ -12,11 +12,14 @@ import java.time.LocalDateTime; import org.elasticsearch.gradle.Architecture import org.elasticsearch.gradle.OS -import org.elasticsearch.gradle.internal.info.BuildParams +import static org.elasticsearch.gradle.internal.util.CiUtils.safeName import java.lang.management.ManagementFactory import java.time.LocalDateTime +// Resolving this early to avoid issues with the build scan plugin in combination with the configuration cache usage +def taskNames = gradle.startParameter.taskNames.join(' ') + develocity { buildScan { @@ -34,12 +37,15 @@ develocity { publishing.onlyIf { false } } + def fips = buildParams.inFipsJvm + def gitRevision = buildParams.gitRevision + background { tag OS.current().name() tag Architecture.current().name() // Tag if this build is run in FIPS mode - if (BuildParams.inFipsJvm) { + if (fips) { tag 'FIPS' } @@ -92,8 +98,8 @@ develocity { link 'Source', "${prBaseUrl}/tree/${System.getenv('BUILDKITE_COMMIT')}" link 'Pull Request', "https://github.com/${repository}/pull/${prId}" } else { - value 'Git Commit ID', BuildParams.gitRevision - link 'Source', "https://github.com/${repository}/tree/${BuildParams.gitRevision}" + value 'Git Commit ID', gitRevision + link 'Source', "https://github.com/${repository}/tree/${gitRevision}" } buildFinished { result -> @@ -108,7 +114,7 @@ develocity { // Add a build annotation // See: 
https://buildkite.com/docs/agent/v3/cli-annotate
-          def body = """
-            ${System.getenv('BUILDKITE_LABEL')} :gradle: ${result.failures ? 'failed' : 'successful'} build: gradle ${gradle.startParameter.taskNames.join(' ')}
-          """
+          def body = """
+            ${System.getenv('BUILDKITE_LABEL')} :gradle: ${result.failures ? 'failed' : 'successful'} build: gradle ${taskNames}
""" def process = [ 'buildkite-agent', 'annotate', @@ -129,7 +135,3 @@ develocity { } } } - -static def safeName(String string) { - return string.replaceAll(/[^a-zA-Z0-9_\-\.]+/, ' ').trim().replaceAll(' ', '_').toLowerCase() -} diff --git a/build-tools-internal/src/main/groovy/elasticsearch.bwc-test.gradle b/build-tools-internal/src/main/groovy/elasticsearch.bwc-test.gradle index 9a988292b5b8..77e509ea9787 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.bwc-test.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.bwc-test.gradle @@ -9,7 +9,6 @@ import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.internal.ElasticsearchTestBasePlugin -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.rest.InternalJavaRestTestPlugin import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask @@ -19,7 +18,7 @@ ext.bwcTaskName = { Version version -> def bwcTestSnapshots = tasks.register("bwcTestSnapshots") { if (project.bwc_tests_enabled) { - dependsOn tasks.matching { task -> BuildParams.bwcVersions.unreleased.any { version -> bwcTaskName(version) == task.name } } + dependsOn tasks.matching { task -> buildParams.bwcVersions.unreleased.any { version -> bwcTaskName(version) == task.name } } } } diff --git a/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle b/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle index 3bff30d9511f..493f7a505bb5 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle @@ -15,11 +15,12 @@ import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask import org.elasticsearch.gradle.testclusters.TestClustersAware import org.elasticsearch.gradle.testclusters.TestDistribution -// Common config when running with a FIPS-140 runtime JVM -if (BuildParams.inFipsJvm) { +//apply plugin: org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin +// Common config when running with a FIPS-140 runtime JVM +if (buildParams.inFipsJvm) { allprojects { - String javaSecurityFilename = BuildParams.runtimeJavaDetails.toLowerCase().contains('oracle') ? 'fips_java_oracle.security' : 'fips_java.security' + String javaSecurityFilename = buildParams.runtimeJavaDetails.get().toLowerCase().contains('oracle') ? 
'fips_java_oracle.security' : 'fips_java.security' File fipsResourcesDir = new File(project.buildDir, 'fips-resources') File fipsSecurity = new File(fipsResourcesDir, javaSecurityFilename) File fipsPolicy = new File(fipsResourcesDir, 'fips_java.policy') diff --git a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle index 63a3cb6d86d6..5640409e0ff4 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle @@ -171,7 +171,7 @@ if (providers.systemProperty('idea.active').getOrNull() == 'true') { idea { project { vcs = 'Git' - jdkName = BuildParams.minimumCompilerVersion.majorVersion + jdkName = buildParams.minimumCompilerVersion.majorVersion settings { delegateActions { diff --git a/build-tools-internal/src/main/groovy/elasticsearch.runtime-jdk-provision.gradle b/build-tools-internal/src/main/groovy/elasticsearch.runtime-jdk-provision.gradle index aacc86e764d5..224e6bd4c50d 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.runtime-jdk-provision.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.runtime-jdk-provision.gradle @@ -10,7 +10,6 @@ import org.elasticsearch.gradle.Architecture import org.elasticsearch.gradle.OS import org.elasticsearch.gradle.VersionProperties -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.precommit.ThirdPartyAuditPrecommitPlugin import org.elasticsearch.gradle.internal.precommit.ThirdPartyAuditTask import org.elasticsearch.gradle.internal.test.rest.RestTestBasePlugin @@ -27,8 +26,8 @@ configure(allprojects) { JvmVendorSpec.matching(VersionProperties.bundledJdkVendor) } project.tasks.withType(Test).configureEach { Test test -> - if (BuildParams.getIsRuntimeJavaHomeSet()) { - test.executable = "${BuildParams.runtimeJavaHome}/bin/java" + + if (buildParams.getIsRuntimeJavaHomeSet()) { + test.executable = "${buildParams.runtimeJavaHome.get()}/bin/java" + (OS.current() == OS.WINDOWS ? 
'.exe' : '') } else { test.javaLauncher = javaToolchains.launcherFor { @@ -41,7 +40,7 @@ configure(allprojects) { } project.plugins.withId("elasticsearch.testclusters") { testClustersPlugin -> project.plugins.withId("elasticsearch.internal-testclusters") { internalPlugin -> - if (BuildParams.getIsRuntimeJavaHomeSet() == false) { + if (buildParams.getIsRuntimeJavaHomeSet() == false) { // If no runtime java home is set, use the bundled JDK for test clusters testClustersPlugin.setRuntimeJava(launcher.map { it.metadata.installationPath.asFile }) } diff --git a/build-tools-internal/src/main/groovy/elasticsearch.stable-api.gradle b/build-tools-internal/src/main/groovy/elasticsearch.stable-api.gradle index 1fab4d035177..27b490329f8c 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.stable-api.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.stable-api.gradle @@ -17,11 +17,11 @@ dependencies { newJar project(":libs:${project.name}") } -BuildParams.bwcVersions.withIndexCompatible({ it.onOrAfter(Version.fromString(ext.stableApiSince)) +buildParams.bwcVersions.withIndexCompatible({ it.onOrAfter(Version.fromString(ext.stableApiSince)) && it != VersionProperties.elasticsearchVersion }) { bwcVersion, baseName -> - BwcVersions.UnreleasedVersionInfo unreleasedVersion = BuildParams.bwcVersions.unreleasedInfo(bwcVersion) + BwcVersions.UnreleasedVersionInfo unreleasedVersion = buildParams.bwcVersions.unreleasedInfo(bwcVersion) configurations { "oldJar${baseName}" { diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BaseInternalPluginBuildPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BaseInternalPluginBuildPlugin.java index 04f031d4a516..49887dac5b6f 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BaseInternalPluginBuildPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BaseInternalPluginBuildPlugin.java @@ -12,7 +12,7 @@ import groovy.lang.Closure; import org.elasticsearch.gradle.internal.conventions.util.Util; -import org.elasticsearch.gradle.internal.info.BuildParams; +import org.elasticsearch.gradle.internal.info.BuildParameterExtension; import org.elasticsearch.gradle.internal.precommit.JarHellPrecommitPlugin; import org.elasticsearch.gradle.internal.test.HistoricalFeaturesMetadataPlugin; import org.elasticsearch.gradle.plugin.PluginBuildPlugin; @@ -39,6 +39,7 @@ public void apply(Project project) { project.getPluginManager().apply(JarHellPrecommitPlugin.class); project.getPluginManager().apply(ElasticsearchJavaPlugin.class); project.getPluginManager().apply(HistoricalFeaturesMetadataPlugin.class); + boolean isCi = project.getRootProject().getExtensions().getByType(BuildParameterExtension.class).isCi(); // Clear default dependencies added by public PluginBuildPlugin as we add our // own project dependencies for internal builds // TODO remove once we removed default dependencies from PluginBuildPlugin @@ -54,7 +55,7 @@ public void apply(Project project) { .set("addQaCheckDependencies", new Closure(BaseInternalPluginBuildPlugin.this, BaseInternalPluginBuildPlugin.this) { public void doCall(Project proj) { // This is only a convenience for local developers so make this a noop when running in CI - if (BuildParams.isCi() == false) { + if (isCi == false) { proj.afterEvaluate(project1 -> { // let check depend on check tasks of qa sub-projects final var checkTaskProvider = project1.getTasks().named("check"); diff --git 
a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcSetupExtension.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcSetupExtension.java index 40d16bafbb26..d7bf839817e1 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcSetupExtension.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcSetupExtension.java @@ -13,7 +13,6 @@ import org.elasticsearch.gradle.LoggedExec; import org.elasticsearch.gradle.OS; import org.elasticsearch.gradle.Version; -import org.elasticsearch.gradle.internal.info.BuildParams; import org.gradle.api.Action; import org.gradle.api.GradleException; import org.gradle.api.Project; @@ -47,6 +46,7 @@ public class BwcSetupExtension { private final ProviderFactory providerFactory; private final JavaToolchainService toolChainService; private final Provider unreleasedVersionInfo; + private final Boolean isCi; private Provider checkoutDir; @@ -56,7 +56,8 @@ public BwcSetupExtension( ProviderFactory providerFactory, JavaToolchainService toolChainService, Provider unreleasedVersionInfo, - Provider checkoutDir + Provider checkoutDir, + Boolean isCi ) { this.project = project; this.objectFactory = objectFactory; @@ -64,6 +65,7 @@ public BwcSetupExtension( this.toolChainService = toolChainService; this.unreleasedVersionInfo = unreleasedVersionInfo; this.checkoutDir = checkoutDir; + this.isCi = isCi; } TaskProvider bwcTask(String name, Action configuration) { @@ -80,7 +82,8 @@ TaskProvider bwcTask(String name, Action configuration, toolChainService, name, configuration, - useUniqueUserHome + useUniqueUserHome, + isCi ); } @@ -93,7 +96,8 @@ private static TaskProvider createRunBwcGradleTask( JavaToolchainService toolChainService, String name, Action configAction, - boolean useUniqueUserHome + boolean useUniqueUserHome, + boolean isCi ) { return project.getTasks().register(name, LoggedExec.class, loggedExec -> { loggedExec.dependsOn("checkoutBwcBranch"); @@ -104,7 +108,7 @@ private static TaskProvider createRunBwcGradleTask( spec.getParameters().getCheckoutDir().set(checkoutDir); }).flatMap(s -> getJavaHome(objectFactory, toolChainService, Integer.parseInt(s)))); - if (BuildParams.isCi() && OS.current() != OS.WINDOWS) { + if (isCi && OS.current() != OS.WINDOWS) { // TODO: Disabled for now until we can figure out why files are getting corrupted // loggedExec.getEnvironment().put("GRADLE_RO_DEP_CACHE", System.getProperty("user.home") + "/gradle_ro_cache"); } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcVersions.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcVersions.java index f467a204c034..93c2623a23d3 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcVersions.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcVersions.java @@ -11,6 +11,7 @@ import org.elasticsearch.gradle.Version; import org.elasticsearch.gradle.VersionProperties; +import java.io.Serializable; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; @@ -60,7 +61,8 @@ * We are then able to map the unreleased version to branches in git and Gradle projects that are capable of checking * out and building them, so we can include these in the testing plan as well. 
*/ -public class BwcVersions { + +public class BwcVersions implements Serializable { private static final Pattern LINE_PATTERN = Pattern.compile( "\\W+public static final Version V_(\\d+)_(\\d+)_(\\d+)(_alpha\\d+|_beta\\d+|_rc\\d+)?.*\\);" @@ -68,7 +70,7 @@ public class BwcVersions { private static final String GLIBC_VERSION_ENV_VAR = "GLIBC_VERSION"; private final Version currentVersion; - private final List versions; + private final transient List versions; private final Map unreleased; public BwcVersions(List versionLines) { diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaBasePlugin.java index 05b7af83aa8e..c897b142da2f 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaBasePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaBasePlugin.java @@ -11,7 +11,7 @@ import org.elasticsearch.gradle.VersionProperties; import org.elasticsearch.gradle.internal.conventions.precommit.PrecommitTaskPlugin; -import org.elasticsearch.gradle.internal.info.BuildParams; +import org.elasticsearch.gradle.internal.info.BuildParameterExtension; import org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin; import org.elasticsearch.gradle.internal.test.MutedTestPlugin; import org.elasticsearch.gradle.internal.test.TestUtil; @@ -49,6 +49,7 @@ public class ElasticsearchJavaBasePlugin implements Plugin { private final JavaToolchainService javaToolchains; + private BuildParameterExtension buildParams; @Inject ElasticsearchJavaBasePlugin(JavaToolchainService javaToolchains) { @@ -57,8 +58,10 @@ public class ElasticsearchJavaBasePlugin implements Plugin { @Override public void apply(Project project) { + project.getRootProject().getPlugins().apply(GlobalBuildInfoPlugin.class); // make sure the global build info plugin is applied to the root project project.getRootProject().getPluginManager().apply(GlobalBuildInfoPlugin.class); + buildParams = project.getRootProject().getExtensions().getByType(BuildParameterExtension.class); project.getPluginManager().apply(JavaBasePlugin.class); // common repositories setup project.getPluginManager().apply(RepositoriesSetupPlugin.class); @@ -129,14 +132,14 @@ private static void disableTransitiveDependenciesForSourceSet(Project project, S public void configureCompile(Project project) { project.getExtensions().getExtraProperties().set("compactProfile", "full"); JavaPluginExtension java = project.getExtensions().getByType(JavaPluginExtension.class); - if (BuildParams.getJavaToolChainSpec().isPresent()) { - java.toolchain(BuildParams.getJavaToolChainSpec().get()); + if (buildParams.getJavaToolChainSpec().isPresent()) { + java.toolchain(buildParams.getJavaToolChainSpec().get()); } - java.setSourceCompatibility(BuildParams.getMinimumRuntimeVersion()); - java.setTargetCompatibility(BuildParams.getMinimumRuntimeVersion()); + java.setSourceCompatibility(buildParams.getMinimumRuntimeVersion()); + java.setTargetCompatibility(buildParams.getMinimumRuntimeVersion()); project.getTasks().withType(JavaCompile.class).configureEach(compileTask -> { compileTask.getJavaCompiler().set(javaToolchains.compilerFor(spec -> { - spec.getLanguageVersion().set(JavaLanguageVersion.of(BuildParams.getMinimumRuntimeVersion().getMajorVersion())); + spec.getLanguageVersion().set(JavaLanguageVersion.of(buildParams.getMinimumRuntimeVersion().getMajorVersion())); 
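                // Note: the compiler language version is now derived from the project-scoped
                // buildParams extension rather than the static BuildParams holder, which helps
                // keep this configuration logic compatible with the configuration cache.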
})); CompileOptions compileOptions = compileTask.getOptions(); @@ -159,7 +162,7 @@ public void configureCompile(Project project) { compileTask.getConventionMapping().map("sourceCompatibility", () -> java.getSourceCompatibility().toString()); compileTask.getConventionMapping().map("targetCompatibility", () -> java.getTargetCompatibility().toString()); compileOptions.getRelease().set(releaseVersionProviderFromCompileTask(project, compileTask)); - compileOptions.setIncremental(BuildParams.isCi() == false); + compileOptions.setIncremental(buildParams.isCi() == false); }); // also apply release flag to groovy, which is used in build-tools project.getTasks().withType(GroovyCompile.class).configureEach(compileTask -> { diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaPlugin.java index d064c70c7281..e62c26c7fbc0 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaPlugin.java @@ -15,8 +15,10 @@ import org.elasticsearch.gradle.VersionProperties; import org.elasticsearch.gradle.internal.conventions.util.Util; -import org.elasticsearch.gradle.internal.info.BuildParams; +import org.elasticsearch.gradle.internal.info.BuildParameterExtension; +import org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin; import org.gradle.api.Action; +import org.gradle.api.JavaVersion; import org.gradle.api.Plugin; import org.gradle.api.Project; import org.gradle.api.Task; @@ -24,6 +26,7 @@ import org.gradle.api.plugins.BasePlugin; import org.gradle.api.plugins.JavaLibraryPlugin; import org.gradle.api.plugins.JavaPlugin; +import org.gradle.api.provider.Property; import org.gradle.api.tasks.TaskProvider; import org.gradle.api.tasks.bundling.Jar; import org.gradle.api.tasks.javadoc.Javadoc; @@ -34,6 +37,7 @@ import java.util.Map; import static org.elasticsearch.gradle.internal.conventions.util.Util.toStringable; +import static org.elasticsearch.gradle.internal.util.ParamsUtils.loadBuildParams; /** * A wrapper around Gradle's Java plugin that applies our @@ -42,13 +46,15 @@ public class ElasticsearchJavaPlugin implements Plugin { @Override public void apply(Project project) { + project.getRootProject().getPlugins().apply(GlobalBuildInfoPlugin.class); + Property buildParams = loadBuildParams(project); project.getPluginManager().apply(ElasticsearchJavaBasePlugin.class); project.getPluginManager().apply(JavaLibraryPlugin.class); project.getPluginManager().apply(ElasticsearchJavaModulePathPlugin.class); // configureConfigurations(project); - configureJars(project); - configureJarManifest(project); + configureJars(project, buildParams.get()); + configureJarManifest(project, buildParams.get()); configureJavadoc(project); testCompileOnlyDeps(project); } @@ -63,7 +69,9 @@ private static void testCompileOnlyDeps(Project project) { /** * Adds additional manifest info to jars */ - static void configureJars(Project project) { + static void configureJars(Project project, BuildParameterExtension buildParams) { + String buildDate = buildParams.getBuildDate().toString(); + JavaVersion gradleJavaVersion = buildParams.getGradleJavaVersion(); project.getTasks().withType(Jar.class).configureEach(jarTask -> { // we put all our distributable files under distributions jarTask.getDestinationDirectory().set(new File(project.getBuildDir(), 
"distributions")); @@ -75,14 +83,11 @@ static void configureJars(Project project) { public void execute(Task task) { // this doFirst is added before the info plugin, therefore it will run // after the doFirst added by the info plugin, and we can override attributes - jarTask.getManifest() - .attributes( - Map.of("Build-Date", BuildParams.getBuildDate(), "Build-Java-Version", BuildParams.getGradleJavaVersion()) - ); + jarTask.getManifest().attributes(Map.of("Build-Date", buildDate, "Build-Java-Version", gradleJavaVersion)); } }); }); - project.getPluginManager().withPlugin("com.github.johnrengelman.shadow", p -> { + project.getPluginManager().withPlugin("com.gradleup.shadow", p -> { project.getTasks().withType(ShadowJar.class).configureEach(shadowJar -> { /* * Replace the default "-all" classifier with null @@ -102,10 +107,13 @@ public void execute(Task task) { }); } - private static void configureJarManifest(Project project) { + private static void configureJarManifest(Project project, BuildParameterExtension buildParams) { + String gitOrigin = buildParams.getGitOrigin(); + String gitRevision = buildParams.getGitRevision(); + project.getPlugins().withType(InfoBrokerPlugin.class).whenPluginAdded(manifestPlugin -> { - manifestPlugin.add("Module-Origin", toStringable(BuildParams::getGitOrigin)); - manifestPlugin.add("Change", toStringable(BuildParams::getGitRevision)); + manifestPlugin.add("Module-Origin", toStringable(() -> gitOrigin)); + manifestPlugin.add("Change", toStringable(() -> gitRevision)); manifestPlugin.add("X-Compile-Elasticsearch-Version", toStringable(VersionProperties::getElasticsearch)); manifestPlugin.add("X-Compile-Lucene-Version", toStringable(VersionProperties::getLucene)); manifestPlugin.add( diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java index 7a831fbcc146..4446952fec2b 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java @@ -13,7 +13,7 @@ import org.elasticsearch.gradle.OS; import org.elasticsearch.gradle.internal.conventions.util.Util; -import org.elasticsearch.gradle.internal.info.BuildParams; +import org.elasticsearch.gradle.internal.info.BuildParameterExtension; import org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin; import org.elasticsearch.gradle.internal.test.ErrorReportingTestListener; import org.elasticsearch.gradle.internal.test.SimpleCommandLineArgumentProvider; @@ -26,6 +26,7 @@ import org.gradle.api.artifacts.Configuration; import org.gradle.api.file.FileCollection; import org.gradle.api.plugins.JavaPlugin; +import org.gradle.api.provider.Property; import org.gradle.api.provider.ProviderFactory; import org.gradle.api.tasks.SourceSet; import org.gradle.api.tasks.SourceSetContainer; @@ -37,6 +38,7 @@ import javax.inject.Inject; +import static org.elasticsearch.gradle.internal.util.ParamsUtils.loadBuildParams; import static org.elasticsearch.gradle.util.FileUtils.mkdirs; import static org.elasticsearch.gradle.util.GradleUtils.maybeConfigure; @@ -52,6 +54,9 @@ public abstract class ElasticsearchTestBasePlugin implements Plugin { @Override public void apply(Project project) { + project.getRootProject().getPlugins().apply(GlobalBuildInfoPlugin.class); + Property buildParams = loadBuildParams(project); + 
project.getPluginManager().apply(GradleTestPolicySetupPlugin.class); // for fips mode check project.getRootProject().getPluginManager().apply(GlobalBuildInfoPlugin.class); @@ -100,7 +105,7 @@ public void execute(Task t) { test.getExtensions().add("nonInputProperties", nonInputProperties); test.setWorkingDir(project.file(project.getBuildDir() + "/testrun/" + test.getName().replace("#", "_"))); - test.setMaxParallelForks(Integer.parseInt(System.getProperty("tests.jvms", BuildParams.getDefaultParallel().toString()))); + test.setMaxParallelForks(Integer.parseInt(System.getProperty("tests.jvms", buildParams.get().getDefaultParallel().toString()))); test.exclude("**/*$*.class"); @@ -146,9 +151,9 @@ public void execute(Task t) { // ignore changing test seed when build is passed -Dignore.tests.seed for cacheability experimentation if (System.getProperty("ignore.tests.seed") != null) { - nonInputProperties.systemProperty("tests.seed", BuildParams.getTestSeed()); + nonInputProperties.systemProperty("tests.seed", buildParams.get().getTestSeed()); } else { - test.systemProperty("tests.seed", BuildParams.getTestSeed()); + test.systemProperty("tests.seed", buildParams.get().getTestSeed()); } // don't track these as inputs since they contain absolute paths and break cache relocatability @@ -193,7 +198,7 @@ public void execute(Task t) { * If this project builds a shadow JAR than any unit tests should test against that artifact instead of * compiled class output and dependency jars. This better emulates the runtime environment of consumers. */ - project.getPluginManager().withPlugin("com.github.johnrengelman.shadow", p -> { + project.getPluginManager().withPlugin("com.gradleup.shadow", p -> { if (test.getName().equals(JavaPlugin.TEST_TASK_NAME)) { // Remove output class files and any other dependencies from the test classpath, since the shadow JAR includes these SourceSetContainer sourceSets = project.getExtensions().getByType(SourceSetContainer.class); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java index fcf286ed471d..80fd6db59cf9 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java @@ -10,7 +10,7 @@ package org.elasticsearch.gradle.internal; import org.elasticsearch.gradle.Version; -import org.elasticsearch.gradle.internal.info.BuildParams; +import org.elasticsearch.gradle.internal.info.BuildParameterExtension; import org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin; import org.gradle.api.Action; import org.gradle.api.InvalidUserDataException; @@ -39,6 +39,7 @@ import static java.util.Arrays.asList; import static java.util.Arrays.stream; +import static org.elasticsearch.gradle.internal.util.ParamsUtils.loadBuildParams; /** * We want to be able to do BWC tests for unreleased versions without relying on and waiting for snapshots. 
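To make the next hunk easier to follow, here is a minimal sketch of the pattern it applies: a hypothetical plugin (class and task names are illustrative) that resolves the extension once and captures plain values for use in task configuration, instead of reading static BuildParams state inside lambdas.

---------------------------------------------------------------------------
import org.elasticsearch.gradle.internal.info.BuildParameterExtension;
import org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin;
import org.gradle.api.Plugin;
import org.gradle.api.Project;

import static org.elasticsearch.gradle.internal.util.ParamsUtils.loadBuildParams;

public class ExampleBwcAwarePlugin implements Plugin<Project> {
    @Override
    public void apply(Project project) {
        // GlobalBuildInfoPlugin registers the BuildParameterExtension on the root project.
        project.getRootProject().getPluginManager().apply(GlobalBuildInfoPlugin.class);
        BuildParameterExtension buildParams = loadBuildParams(project).get();
        boolean isCi = buildParams.isCi(); // capture a plain value, safe to use in lambdas
        project.getTasks().register("exampleBwcTask", task -> {
            // Mirrors createBuildBwcTask below: only allow build caching on CI.
            task.getOutputs().doNotCacheIf("BWC distribution caching is disabled for local builds", t -> isCi == false);
        });
    }
}
---------------------------------------------------------------------------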
@@ -64,23 +65,29 @@ public void apply(Project project) { project.getRootProject().getPluginManager().apply(GlobalBuildInfoPlugin.class); project.getPlugins().apply(JvmToolchainsPlugin.class); toolChainService = project.getExtensions().getByType(JavaToolchainService.class); - BuildParams.getBwcVersions().forPreviousUnreleased((BwcVersions.UnreleasedVersionInfo unreleasedVersion) -> { + BuildParameterExtension buildParams = loadBuildParams(project).get(); + Boolean isCi = buildParams.isCi(); + buildParams.getBwcVersions().forPreviousUnreleased((BwcVersions.UnreleasedVersionInfo unreleasedVersion) -> { configureBwcProject( project.project(unreleasedVersion.gradleProjectPath()), + buildParams, unreleasedVersion, providerFactory, objectFactory, - toolChainService + toolChainService, + isCi ); }); } private static void configureBwcProject( Project project, + BuildParameterExtension buildParams, BwcVersions.UnreleasedVersionInfo versionInfo, ProviderFactory providerFactory, ObjectFactory objectFactory, - JavaToolchainService toolChainService + JavaToolchainService toolChainService, + Boolean isCi ) { ProjectLayout layout = project.getLayout(); Provider versionInfoProvider = providerFactory.provider(() -> versionInfo); @@ -96,7 +103,8 @@ private static void configureBwcProject( providerFactory, toolChainService, versionInfoProvider, - checkoutDir + checkoutDir, + isCi ); BwcGitExtension gitExtension = project.getPlugins().apply(InternalBwcGitPlugin.class).getGitExtension(); Provider bwcVersion = versionInfoProvider.map(info -> info.version()); @@ -122,6 +130,7 @@ private static void configureBwcProject( for (DistributionProject distributionProject : distributionProjects) { createBuildBwcTask( bwcSetupExtension, + buildParams, project, bwcVersion, distributionProject.name, @@ -144,6 +153,7 @@ private static void configureBwcProject( createBuildBwcTask( bwcSetupExtension, + buildParams, project, bwcVersion, "jdbc", @@ -177,6 +187,7 @@ private static void configureBwcProject( createBuildBwcTask( bwcSetupExtension, + buildParams, project, bwcVersion, stableApiProject.getName(), @@ -296,6 +307,7 @@ public static String buildBwcTaskName(String projectName) { static void createBuildBwcTask( BwcSetupExtension bwcSetupExtension, + BuildParameterExtension buildParams, Project project, Provider bwcVersion, String projectName, @@ -316,7 +328,7 @@ static void createBuildBwcTask( } else { c.getOutputs().files(expectedOutputFile); } - c.getOutputs().doNotCacheIf("BWC distribution caching is disabled for local builds", task -> BuildParams.isCi() == false); + c.getOutputs().doNotCacheIf("BWC distribution caching is disabled for local builds", task -> buildParams.isCi() == false); c.getArgs().add("-p"); c.getArgs().add(projectPath); c.getArgs().add(assembleTaskName); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java index 0bf4bcb33c23..60699522cdc3 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java @@ -20,7 +20,7 @@ import org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes; import org.elasticsearch.gradle.internal.docker.DockerSupportPlugin; import org.elasticsearch.gradle.internal.docker.DockerSupportService; -import 
org.elasticsearch.gradle.internal.info.BuildParams; +import org.elasticsearch.gradle.internal.info.BuildParameterExtension; import org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin; import org.elasticsearch.gradle.util.GradleUtils; import org.gradle.api.GradleException; @@ -35,6 +35,8 @@ import java.util.Map; import java.util.function.Function; +import static org.elasticsearch.gradle.internal.util.ParamsUtils.loadBuildParams; + /** * An internal elasticsearch build plugin that registers additional * distribution resolution strategies to the 'elasticsearch.download-distribution' plugin @@ -47,6 +49,8 @@ public void apply(Project project) { // this is needed for isInternal project.getRootProject().getPluginManager().apply(GlobalBuildInfoPlugin.class); project.getRootProject().getPluginManager().apply(DockerSupportPlugin.class); + BuildParameterExtension buildParams = loadBuildParams(project).get(); + DistributionDownloadPlugin distributionDownloadPlugin = project.getPlugins().apply(DistributionDownloadPlugin.class); Provider dockerSupport = GradleUtils.getBuildService( project.getGradle().getSharedServices(), @@ -55,7 +59,10 @@ public void apply(Project project) { distributionDownloadPlugin.setDockerAvailability( dockerSupport.map(dockerSupportService -> dockerSupportService.getDockerAvailability().isAvailable()) ); - registerInternalDistributionResolutions(DistributionDownloadPlugin.getRegistrationsContainer(project)); + registerInternalDistributionResolutions( + DistributionDownloadPlugin.getRegistrationsContainer(project), + buildParams.getBwcVersionsProperty() + ); } /** @@ -66,7 +73,7 @@ public void apply(Project project) { *

* BWC versions are resolved as project to projects under `:distribution:bwc`. */ - private void registerInternalDistributionResolutions(List resolutions) { + private void registerInternalDistributionResolutions(List resolutions, Provider bwcVersions) { resolutions.add(new DistributionResolution("local-build", (project, distribution) -> { if (isCurrentVersion(distribution)) { // non-external project, so depend on local build @@ -78,7 +85,7 @@ private void registerInternalDistributionResolutions(List { - BwcVersions.UnreleasedVersionInfo unreleasedInfo = BuildParams.getBwcVersions() + BwcVersions.UnreleasedVersionInfo unreleasedInfo = bwcVersions.get() .unreleasedInfo(Version.fromString(distribution.getVersion())); if (unreleasedInfo != null) { if (distribution.getBundledJdk() == false) { diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalTestClustersPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalTestClustersPlugin.java index c7ab83ff7829..7e7ffad12a9a 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalTestClustersPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalTestClustersPlugin.java @@ -10,34 +10,29 @@ package org.elasticsearch.gradle.internal; import org.elasticsearch.gradle.VersionProperties; -import org.elasticsearch.gradle.internal.info.BuildParams; +import org.elasticsearch.gradle.internal.info.BuildParameterExtension; +import org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin; import org.elasticsearch.gradle.testclusters.ElasticsearchCluster; import org.elasticsearch.gradle.testclusters.TestClustersPlugin; import org.gradle.api.NamedDomainObjectContainer; import org.gradle.api.Plugin; import org.gradle.api.Project; -import org.gradle.api.provider.ProviderFactory; -import javax.inject.Inject; +import static org.elasticsearch.gradle.internal.util.ParamsUtils.loadBuildParams; public class InternalTestClustersPlugin implements Plugin { - private ProviderFactory providerFactory; - - @Inject - public InternalTestClustersPlugin(ProviderFactory providerFactory) { - this.providerFactory = providerFactory; - } - @Override public void apply(Project project) { project.getPlugins().apply(InternalDistributionDownloadPlugin.class); + project.getRootProject().getRootProject().getPlugins().apply(GlobalBuildInfoPlugin.class); + BuildParameterExtension buildParams = loadBuildParams(project).get(); project.getRootProject().getPluginManager().apply(InternalReaperPlugin.class); TestClustersPlugin testClustersPlugin = project.getPlugins().apply(TestClustersPlugin.class); - testClustersPlugin.setRuntimeJava(providerFactory.provider(() -> BuildParams.getRuntimeJavaHome())); + testClustersPlugin.setRuntimeJava(buildParams.getRuntimeJavaHome()); testClustersPlugin.setIsReleasedVersion( - version -> (version.equals(VersionProperties.getElasticsearchVersion()) && BuildParams.isSnapshotBuild() == false) - || BuildParams.getBwcVersions().unreleasedInfo(version) == null + version -> (version.equals(VersionProperties.getElasticsearchVersion()) && buildParams.isSnapshotBuild() == false) + || buildParams.getBwcVersions().unreleasedInfo(version) == null ); if (shouldConfigureTestClustersWithOneProcessor()) { diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java index f988208ab4fe..d1585120b080 100644 --- 
a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java @@ -9,7 +9,8 @@ package org.elasticsearch.gradle.internal; -import org.elasticsearch.gradle.internal.info.BuildParams; +import org.elasticsearch.gradle.internal.info.BuildParameterExtension; +import org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin; import org.elasticsearch.gradle.internal.precommit.CheckForbiddenApisTask; import org.elasticsearch.gradle.util.GradleUtils; import org.gradle.api.JavaVersion; @@ -47,6 +48,7 @@ import javax.inject.Inject; import static de.thetaphi.forbiddenapis.gradle.ForbiddenApisPlugin.FORBIDDEN_APIS_TASK_NAME; +import static org.elasticsearch.gradle.internal.util.ParamsUtils.loadBuildParams; import static org.objectweb.asm.Opcodes.V_PREVIEW; public class MrjarPlugin implements Plugin { @@ -64,6 +66,8 @@ public class MrjarPlugin implements Plugin { @Override public void apply(Project project) { project.getPluginManager().apply(ElasticsearchJavaBasePlugin.class); + project.getRootProject().getPlugins().apply(GlobalBuildInfoPlugin.class); + var buildParams = loadBuildParams(project).get(); var javaExtension = project.getExtensions().getByType(JavaPluginExtension.class); var isIdeaSync = System.getProperty("idea.sync.active", "false").equals("true"); var ideaSourceSetsEnabled = project.hasProperty(MRJAR_IDEA_ENABLED) && project.property(MRJAR_IDEA_ENABLED).equals("true"); @@ -89,7 +93,7 @@ public void apply(Project project) { String testSourceSetName = SourceSet.TEST_SOURCE_SET_NAME + javaVersion; SourceSet testSourceSet = addSourceSet(project, javaExtension, testSourceSetName, testSourceSets, javaVersion); testSourceSets.add(testSourceSetName); - createTestTask(project, testSourceSet, javaVersion, mainSourceSets); + createTestTask(project, buildParams, testSourceSet, javaVersion, mainSourceSets); } } @@ -163,7 +167,13 @@ private void configureSourceSetInJar(Project project, SourceSet sourceSet, int j jarTask.configure(task -> task.into("META-INF/versions/" + javaVersion, copySpec -> copySpec.from(sourceSet.getOutput()))); } - private void createTestTask(Project project, SourceSet sourceSet, int javaVersion, List mainSourceSets) { + private void createTestTask( + Project project, + BuildParameterExtension buildParams, + SourceSet sourceSet, + int javaVersion, + List mainSourceSets + ) { var jarTask = project.getTasks().withType(Jar.class).named(JavaPlugin.JAR_TASK_NAME); var testTaskProvider = project.getTasks().register(JavaPlugin.TEST_TASK_NAME + javaVersion, Test.class); testTaskProvider.configure(testTask -> { @@ -180,9 +190,9 @@ private void createTestTask(Project project, SourceSet sourceSet, int javaVersio // only set the jdk if runtime java isn't set because setting the toolchain is incompatible with // runtime java setting the executable directly - if (BuildParams.getIsRuntimeJavaHomeSet()) { + if (buildParams.getIsRuntimeJavaHomeSet()) { testTask.onlyIf("runtime java must support java " + javaVersion, t -> { - JavaVersion runtimeJavaVersion = BuildParams.getRuntimeJavaVersion(); + JavaVersion runtimeJavaVersion = buildParams.getRuntimeJavaVersion().get(); return runtimeJavaVersion.isCompatibleWith(JavaVersion.toVersion(javaVersion)); }); } else { diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerSupportPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerSupportPlugin.java 
index 84a6432041ed..7348181c4199 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerSupportPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerSupportPlugin.java @@ -8,6 +8,7 @@ */ package org.elasticsearch.gradle.internal.docker; +import org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin; import org.gradle.api.Plugin; import org.gradle.api.Project; import org.gradle.api.Task; @@ -17,6 +18,8 @@ import java.util.List; import java.util.stream.Collectors; +import static org.elasticsearch.gradle.internal.util.ParamsUtils.loadBuildParams; + /** * Plugin providing {@link DockerSupportService} for detecting Docker installations and determining requirements for Docker-based * Elasticsearch build tasks. @@ -30,11 +33,14 @@ public void apply(Project project) { if (project != project.getRootProject()) { throw new IllegalStateException(this.getClass().getName() + " can only be applied to the root project."); } + project.getPlugins().apply(GlobalBuildInfoPlugin.class); + var buildParams = loadBuildParams(project).get(); Provider dockerSupportServiceProvider = project.getGradle() .getSharedServices() .registerIfAbsent(DOCKER_SUPPORT_SERVICE_NAME, DockerSupportService.class, spec -> spec.parameters(params -> { params.setExclusionsFile(new File(project.getRootDir(), DOCKER_ON_LINUX_EXCLUSIONS_FILE)); + params.getIsCI().set(buildParams.isCi()); })); // Ensure that if we are trying to run any DockerBuildTask tasks, we assert an available Docker installation exists diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerSupportService.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerSupportService.java index 344a477e74ef..f40f5d932b70 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerSupportService.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerSupportService.java @@ -13,10 +13,10 @@ import org.elasticsearch.gradle.Architecture; import org.elasticsearch.gradle.OS; import org.elasticsearch.gradle.Version; -import org.elasticsearch.gradle.internal.info.BuildParams; import org.gradle.api.GradleException; import org.gradle.api.logging.Logger; import org.gradle.api.logging.Logging; +import org.gradle.api.provider.Property; import org.gradle.api.provider.ProviderFactory; import org.gradle.api.services.BuildService; import org.gradle.api.services.BuildServiceParameters; @@ -59,7 +59,6 @@ public abstract class DockerSupportService implements BuildService serviceInfos; private Map> tcpPorts; private Map> udpPorts; @@ -228,7 +227,7 @@ private boolean isExcludedOs() { // We don't attempt to check the current flavor and version of Linux unless we're // running in CI, because we don't want to stop people running the Docker tests in // their own environments if they really want to. - if (BuildParams.isCi() == false) { + if (getParameters().getIsCI().get().booleanValue() == false) { return false; } @@ -356,10 +355,6 @@ public Map> getUdpPorts() { return udpPorts; } - public void setServiceInfos(Map serviceInfos) { - this.serviceInfos = serviceInfos; - } - /** * An immutable class that represents the results of a Docker search from {@link #getDockerAvailability()}}. 
 */
@@ -402,5 +397,7 @@ interface Parameters extends BuildServiceParameters {
         File getExclusionsFile();
 
         void setExclusionsFile(File exclusionsFile);
+
+        Property<Boolean> getIsCI();
     }
 }
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BuildParameterExtension.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BuildParameterExtension.java
new file mode 100644
index 000000000000..5531194e0abd
--- /dev/null
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BuildParameterExtension.java
@@ -0,0 +1,187 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.gradle.internal.info;
+
+import org.elasticsearch.gradle.internal.BwcVersions;
+import org.gradle.api.Action;
+import org.gradle.api.JavaVersion;
+import org.gradle.api.Task;
+import org.gradle.api.provider.Property;
+import org.gradle.api.provider.Provider;
+import org.gradle.api.provider.ProviderFactory;
+import org.gradle.jvm.toolchain.JavaToolchainSpec;
+
+import java.io.File;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
+import java.util.List;
+import java.util.Random;
+import java.util.concurrent.atomic.AtomicReference;
+
+public abstract class BuildParameterExtension {
+    private final Provider<Boolean> inFipsJvm;
+    private final Provider<File> runtimeJavaHome;
+    private final Boolean isRuntimeJavaHomeSet;
+    private final List javaVersions;
+    private final JavaVersion minimumCompilerVersion;
+    private final JavaVersion minimumRuntimeVersion;
+    private final JavaVersion gradleJavaVersion;
+    private final Provider<JavaVersion> runtimeJavaVersion;
+    private final Provider<Action<JavaToolchainSpec>> javaToolChainSpec;
+    private final Provider<String> runtimeJavaDetails;
+    private final String gitRevision;
+    private transient AtomicReference<ZonedDateTime> buildDate = new AtomicReference<>();
+    private final String testSeed;
+    private final Boolean isCi;
+    private final Integer defaultParallel;
+    private final Boolean isSnapshotBuild;
+
+    public BuildParameterExtension(
+        ProviderFactory providers,
+        Provider<File> runtimeJavaHome,
+        Provider<Action<JavaToolchainSpec>> javaToolChainSpec,
+        Provider<JavaVersion> runtimeJavaVersion,
+        boolean isRuntimeJavaHomeSet,
+        Provider<String> runtimeJavaDetails,
+        List javaVersions,
+        JavaVersion minimumCompilerVersion,
+        JavaVersion minimumRuntimeVersion,
+        JavaVersion gradleJavaVersion,
+        String gitRevision,
+        String gitOrigin,
+        ZonedDateTime buildDate,
+        String testSeed,
+        boolean isCi,
+        int defaultParallel,
+        final boolean isSnapshotBuild,
+        Provider<BwcVersions> bwcVersions
+    ) {
+        this.inFipsJvm = providers.systemProperty("tests.fips.enabled").map(BuildParameterExtension::parseBoolean);
+        this.runtimeJavaHome = runtimeJavaHome;
+        this.javaToolChainSpec = javaToolChainSpec;
+        this.runtimeJavaVersion = runtimeJavaVersion;
+        this.isRuntimeJavaHomeSet = isRuntimeJavaHomeSet;
+        this.runtimeJavaDetails = runtimeJavaDetails;
+        this.javaVersions = javaVersions;
+        this.minimumCompilerVersion = minimumCompilerVersion;
+        this.minimumRuntimeVersion = minimumRuntimeVersion;
+        this.gradleJavaVersion = gradleJavaVersion;
+        this.gitRevision = gitRevision;
+        this.testSeed = testSeed;
+        this.isCi = isCi;
+        this.defaultParallel = defaultParallel;
+        this.isSnapshotBuild = isSnapshotBuild;
+        this.getBwcVersionsProperty().set(bwcVersions);
+        this.getGitOriginProperty().set(gitOrigin);
+    }
+
+    private static boolean parseBoolean(String s) {
+        if (s == null) {
+            return false;
+        }
+        return Boolean.parseBoolean(s);
+    }
+
+    public boolean getInFipsJvm() {
+        return inFipsJvm.getOrElse(false);
+    }
+
+    public Provider<File> getRuntimeJavaHome() {
+        return runtimeJavaHome;
+    }
+
+    public void withFipsEnabledOnly(Task task) {
+        task.onlyIf("FIPS mode disabled", task1 -> getInFipsJvm() == false);
+    }
+
+    public Boolean getIsRuntimeJavaHomeSet() {
+        return isRuntimeJavaHomeSet;
+    }
+
+    public List getJavaVersions() {
+        return javaVersions;
+    }
+
+    public JavaVersion getMinimumCompilerVersion() {
+        return minimumCompilerVersion;
+    }
+
+    public JavaVersion getMinimumRuntimeVersion() {
+        return minimumRuntimeVersion;
+    }
+
+    public JavaVersion getGradleJavaVersion() {
+        return gradleJavaVersion;
+    }
+
+    public Provider<JavaVersion> getRuntimeJavaVersion() {
+        return runtimeJavaVersion;
+    }
+
+    public Provider<Action<JavaToolchainSpec>> getJavaToolChainSpec() {
+        return javaToolChainSpec;
+    }
+
+    public Provider<String> getRuntimeJavaDetails() {
+        return runtimeJavaDetails;
+    }
+
+    public String getGitRevision() {
+        return gitRevision;
+    }
+
+    public String getGitOrigin() {
+        return getGitOriginProperty().get();
+    }
+
+    public ZonedDateTime getBuildDate() {
+        ZonedDateTime value = buildDate.get();
+        if (value == null) {
+            value = ZonedDateTime.now(ZoneOffset.UTC);
+            if (buildDate.compareAndSet(null, value) == false) {
+                // If another thread initialized it first, return the initialized value
+                value = buildDate.get();
+            }
+        }
+        return value;
+    }
+
+    public String getTestSeed() {
+        return testSeed;
+    }
+
+    public Boolean isCi() {
+        return isCi;
+    }
+
+    public Integer getDefaultParallel() {
+        return defaultParallel;
+    }
+
+    public Boolean isSnapshotBuild() {
+        return isSnapshotBuild;
+    }
+
+    public BwcVersions getBwcVersions() {
+        return getBwcVersionsProperty().get();
+    }
+
+    public abstract Property<BwcVersions> getBwcVersionsProperty();
+
+    public abstract Property<String> getGitOriginProperty();
+
+    public Random getRandom() {
+        return new Random(Long.parseUnsignedLong(testSeed.split(":")[0], 16));
+    }
+
+    public Boolean isGraalVmRuntime() {
+        return runtimeJavaDetails.get().toLowerCase().contains("graalvm");
+    }
+}
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BuildParameterService.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BuildParameterService.java
new file mode 100644
index 000000000000..ec1bc4aec132
--- /dev/null
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BuildParameterService.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.gradle.internal.info;
+
+import org.gradle.api.provider.Property;
+import org.gradle.api.services.BuildService;
+import org.gradle.api.services.BuildServiceParameters;
+
+public abstract class BuildParameterService implements BuildService<BuildParameterService.Params>, AutoCloseable {
+    public interface Params extends BuildServiceParameters {
+        Property<BuildParameterExtension> getBuildParams();
+    }
+}
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BuildParams.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BuildParams.java
index d3afeed9f857..ea8aeda8fc09 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BuildParams.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BuildParams.java
@@ -8,43 +8,13 @@
  */
 package org.elasticsearch.gradle.internal.info;
 
-import org.elasticsearch.gradle.internal.BwcVersions;
-import org.gradle.api.Action;
-import org.gradle.api.JavaVersion;
-import org.gradle.api.Task;
-import org.gradle.api.provider.Provider;
-import org.gradle.jvm.toolchain.JavaToolchainSpec;
-
-import java.io.File;
-import java.io.IOException;
 import java.lang.reflect.Modifier;
-import java.time.ZonedDateTime;
 import java.util.Arrays;
-import java.util.List;
-import java.util.Random;
 import java.util.function.Consumer;
 
-import static java.util.Objects.requireNonNull;
-
+@Deprecated
 public class BuildParams {
-    private static Provider<File> runtimeJavaHome;
-    private static Boolean isRuntimeJavaHomeSet;
-    private static List javaVersions;
-    private static JavaVersion minimumCompilerVersion;
-    private static JavaVersion minimumRuntimeVersion;
-    private static JavaVersion gradleJavaVersion;
-    private static Provider<JavaVersion> runtimeJavaVersion;
-    private static Provider<Action<JavaToolchainSpec>> javaToolChainSpec;
-    private static Provider<String> runtimeJavaDetails;
-    private static Boolean inFipsJvm;
-    private static String gitRevision;
-    private static String gitOrigin;
-    private static ZonedDateTime buildDate;
-    private static String testSeed;
     private static Boolean isCi;
-    private static Integer defaultParallel;
-    private static Boolean isSnapshotBuild;
-    private static Provider<BwcVersions> bwcVersions;
 
     /**
      * Initialize global build parameters. This method accepts an initialization function which in turn accepts a
@@ -58,90 +28,10 @@ public static void init(Consumer<MutableBuildParams> initializer) {
         initializer.accept(MutableBuildParams.INSTANCE);
     }
 
-    public static File getRuntimeJavaHome() {
-        return value(runtimeJavaHome).get();
-    }
-
-    public static Boolean getIsRuntimeJavaHomeSet() {
-        return value(isRuntimeJavaHomeSet);
-    }
-
-    public static List getJavaVersions() {
-        return value(javaVersions);
-    }
-
-    public static JavaVersion getMinimumCompilerVersion() {
-        return value(minimumCompilerVersion);
-    }
-
-    public static JavaVersion getMinimumRuntimeVersion() {
-        return value(minimumRuntimeVersion);
-    }
-
-    public static JavaVersion getGradleJavaVersion() {
-        return value(gradleJavaVersion);
-    }
-
-    public static JavaVersion getRuntimeJavaVersion() {
-        return value(runtimeJavaVersion.get());
-    }
-
-    public static String getRuntimeJavaDetails() {
-        return value(runtimeJavaDetails.get());
-    }
-
-    public static Boolean isInFipsJvm() {
-        return value(inFipsJvm);
-    }
-
-    public static void withFipsEnabledOnly(Task task) {
-        task.onlyIf("FIPS mode disabled", task1 -> isInFipsJvm() == false);
-    }
-
-    public static String getGitRevision() {
-        return value(gitRevision);
-    }
-
-    public static String getGitOrigin() {
-        return value(gitOrigin);
-    }
-
-    public static ZonedDateTime getBuildDate() {
-        return value(buildDate);
-    }
-
-    public static BwcVersions getBwcVersions() {
-        return value(bwcVersions).get();
-    }
-
-    public static String getTestSeed() {
-        return value(testSeed);
-    }
-
-    public static Random getRandom() {
-        return new Random(Long.parseUnsignedLong(testSeed.split(":")[0], 16));
-    }
-
     public static Boolean isCi() {
         return value(isCi);
     }
 
-    public static Boolean isGraalVmRuntime() {
-        return value(runtimeJavaDetails.get().toLowerCase().contains("graalvm"));
-    }
-
-    public static Integer getDefaultParallel() {
-        return value(defaultParallel);
-    }
-
-    public static boolean isSnapshotBuild() {
-        return value(BuildParams.isSnapshotBuild);
-    }
-
-    public static Provider<Action<JavaToolchainSpec>> getJavaToolChainSpec() {
-        return javaToolChainSpec;
-    }
-
     private static <T> T value(T object) {
         if (object == null) {
             String callingMethod = Thread.currentThread().getStackTrace()[2].getMethodName();
@@ -183,82 +73,8 @@ public void reset() {
             });
         }
 
-        public void setRuntimeJavaHome(Provider<File> runtimeJavaHome) {
-            BuildParams.runtimeJavaHome = runtimeJavaHome.map(javaHome -> {
-                try {
-                    return javaHome.getCanonicalFile();
-                } catch (IOException e) {
-                    throw new RuntimeException(e);
-                }
-            });
-        }
-
-        public void setIsRuntimeJavaHomeSet(boolean isRuntimeJavaHomeSet) {
-            BuildParams.isRuntimeJavaHomeSet = isRuntimeJavaHomeSet;
-        }
-
-        public void setJavaVersions(List javaVersions) {
-            BuildParams.javaVersions = requireNonNull(javaVersions);
-        }
-
-        public void setMinimumCompilerVersion(JavaVersion minimumCompilerVersion) {
-            BuildParams.minimumCompilerVersion = requireNonNull(minimumCompilerVersion);
-        }
-
-        public void setMinimumRuntimeVersion(JavaVersion minimumRuntimeVersion) {
-            BuildParams.minimumRuntimeVersion = requireNonNull(minimumRuntimeVersion);
-        }
-
-        public void setGradleJavaVersion(JavaVersion gradleJavaVersion) {
-            BuildParams.gradleJavaVersion = requireNonNull(gradleJavaVersion);
-        }
-
-        public void setRuntimeJavaVersion(Provider<JavaVersion> runtimeJavaVersion) {
-            BuildParams.runtimeJavaVersion = requireNonNull(runtimeJavaVersion);
-        }
-
-        public void setRuntimeJavaDetails(Provider<String> runtimeJavaDetails) {
-            BuildParams.runtimeJavaDetails = runtimeJavaDetails;
-        }
-
-        public void
setInFipsJvm(boolean inFipsJvm) { - BuildParams.inFipsJvm = inFipsJvm; - } - - public void setGitRevision(String gitRevision) { - BuildParams.gitRevision = requireNonNull(gitRevision); - } - - public void setGitOrigin(String gitOrigin) { - BuildParams.gitOrigin = requireNonNull(gitOrigin); - } - - public void setBuildDate(ZonedDateTime buildDate) { - BuildParams.buildDate = requireNonNull(buildDate); - } - - public void setTestSeed(String testSeed) { - BuildParams.testSeed = requireNonNull(testSeed); - } - public void setIsCi(boolean isCi) { BuildParams.isCi = isCi; } - - public void setDefaultParallel(int defaultParallel) { - BuildParams.defaultParallel = defaultParallel; - } - - public void setIsSnapshotBuild(final boolean isSnapshotBuild) { - BuildParams.isSnapshotBuild = isSnapshotBuild; - } - - public void setBwcVersions(Provider bwcVersions) { - BuildParams.bwcVersions = requireNonNull(bwcVersions); - } - - public void setJavaToolChainSpec(Provider> javaToolChain) { - BuildParams.javaToolChainSpec = javaToolChain; - } } } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java index 2d5a28bdd6af..761b0601a1c2 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java @@ -58,6 +58,8 @@ import javax.inject.Inject; +import static org.elasticsearch.gradle.internal.conventions.GUtils.elvis; + public class GlobalBuildInfoPlugin implements Plugin { private static final Logger LOGGER = Logging.getLogger(GlobalBuildInfoPlugin.class); private static final String DEFAULT_VERSION_JAVA_FILE_PATH = "server/src/main/java/org/elasticsearch/Version.java"; @@ -67,6 +69,7 @@ public class GlobalBuildInfoPlugin implements Plugin { private final JvmMetadataDetector metadataDetector; private final ProviderFactory providers; private JavaToolchainService toolChainService; + private Project project; @Inject public GlobalBuildInfoPlugin( @@ -87,6 +90,7 @@ public void apply(Project project) { if (project != project.getRootProject()) { throw new IllegalStateException(this.getClass().getName() + " can only be applied to the root project."); } + this.project = project; project.getPlugins().apply(JvmToolchainsPlugin.class); toolChainService = project.getExtensions().getByType(JavaToolchainService.class); GradleVersion minimumGradleVersion = GradleVersion.version(getResourceContents("/minimumGradleVersion")); @@ -98,55 +102,61 @@ public void apply(Project project) { JavaVersion minimumRuntimeVersion = JavaVersion.toVersion(getResourceContents("/minimumRuntimeVersion")); Provider explicitRuntimeJavaHome = findRuntimeJavaHome(); - boolean isExplicitRuntimeJavaHomeSet = explicitRuntimeJavaHome.isPresent(); - Provider actualRuntimeJavaHome = isExplicitRuntimeJavaHomeSet + boolean isRuntimeJavaHomeExplicitlySet = explicitRuntimeJavaHome.isPresent(); + Provider actualRuntimeJavaHome = isRuntimeJavaHomeExplicitlySet ? 
explicitRuntimeJavaHome : resolveJavaHomeFromToolChainService(VersionProperties.getBundledJdkMajorVersion()); GitInfo gitInfo = GitInfo.gitInfo(project.getRootDir()); - BuildParams.init(params -> { - params.reset(); - params.setRuntimeJavaHome(actualRuntimeJavaHome); - params.setJavaToolChainSpec(resolveToolchainSpecFromEnv()); - Provider runtimeJdkMetaData = actualRuntimeJavaHome.map( - runtimeJavaHome -> metadataDetector.getMetadata(getJavaInstallation(runtimeJavaHome)) - ); - params.setRuntimeJavaVersion( + Provider runtimeJdkMetaData = actualRuntimeJavaHome.map( + runtimeJavaHome -> metadataDetector.getMetadata(getJavaInstallation(runtimeJavaHome)) + ); + AtomicReference cache = new AtomicReference<>(); + Provider bwcVersionsProvider = providers.provider( + () -> cache.updateAndGet(val -> val == null ? resolveBwcVersions() : val) + ); + BuildParameterExtension buildParams = project.getExtensions() + .create( + "buildParams", + BuildParameterExtension.class, + actualRuntimeJavaHome, + resolveToolchainSpecFromEnv(), actualRuntimeJavaHome.map( javaHome -> determineJavaVersion( "runtime java.home", javaHome, - isExplicitRuntimeJavaHomeSet + isRuntimeJavaHomeExplicitlySet ? minimumRuntimeVersion : JavaVersion.toVersion(VersionProperties.getBundledJdkMajorVersion()) ) - ) + ), + isRuntimeJavaHomeExplicitlySet, + runtimeJdkMetaData.map(m -> formatJavaVendorDetails(m)), + getAvailableJavaVersions(), + minimumCompilerVersion, + minimumRuntimeVersion, + Jvm.current().getJavaVersion(), + gitInfo.getRevision(), + gitInfo.getOrigin(), + ZonedDateTime.now(ZoneOffset.UTC), + getTestSeed(), + System.getenv("JENKINS_URL") != null || System.getenv("BUILDKITE_BUILD_URL") != null || System.getProperty("isCI") != null, + ParallelDetector.findDefaultParallel(project), + Util.getBooleanProperty("build.snapshot", true), + bwcVersionsProvider ); - params.setIsRuntimeJavaHomeSet(isExplicitRuntimeJavaHomeSet); - params.setRuntimeJavaDetails(runtimeJdkMetaData.map(m -> formatJavaVendorDetails(m))); - params.setJavaVersions(getAvailableJavaVersions()); - params.setMinimumCompilerVersion(minimumCompilerVersion); - params.setMinimumRuntimeVersion(minimumRuntimeVersion); - params.setGradleJavaVersion(Jvm.current().getJavaVersion()); - params.setGitRevision(gitInfo.getRevision()); - params.setGitOrigin(gitInfo.getOrigin()); - params.setBuildDate(ZonedDateTime.now(ZoneOffset.UTC)); - params.setTestSeed(getTestSeed()); + + project.getGradle().getSharedServices().registerIfAbsent("buildParams", BuildParameterService.class, spec -> { + // Provide some parameters + spec.getParameters().getBuildParams().set(buildParams); + }); + + BuildParams.init(params -> { + params.reset(); params.setIsCi( System.getenv("JENKINS_URL") != null || System.getenv("BUILDKITE_BUILD_URL") != null || System.getProperty("isCI") != null ); - params.setDefaultParallel(ParallelDetector.findDefaultParallel(project)); - params.setInFipsJvm(Util.getBooleanProperty("tests.fips.enabled", false)); - params.setIsSnapshotBuild(Util.getBooleanProperty("build.snapshot", true)); - AtomicReference cache = new AtomicReference<>(); - params.setBwcVersions( - providers.provider( - () -> cache.updateAndGet( - val -> val == null ? 
resolveBwcVersions(Util.locateElasticsearchWorkspace(project.getGradle())) : val - ) - ) - ); }); // Enforce the minimum compiler version @@ -155,7 +165,7 @@ public void apply(Project project) { // Print global build info header just before task execution // Only do this if we are the root build of a composite if (GradleUtils.isIncludedBuild(project) == false) { - project.getGradle().getTaskGraph().whenReady(graph -> logGlobalBuildInfo()); + project.getGradle().getTaskGraph().whenReady(graph -> logGlobalBuildInfo(buildParams)); } } @@ -180,9 +190,12 @@ private String formatJavaVendorDetails(JvmInstallationMetadata runtimeJdkMetaDat /* Introspect all versions of ES that may be tested against for backwards * compatibility. It is *super* important that this logic is the same as the * logic in VersionUtils.java. */ - private static BwcVersions resolveBwcVersions(File root) { - File versionsFile = new File(root, DEFAULT_VERSION_JAVA_FILE_PATH); - try (var is = new FileInputStream(versionsFile)) { + private BwcVersions resolveBwcVersions() { + String versionsFilePath = elvis( + System.getProperty("BWC_VERSION_SOURCE"), + new File(Util.locateElasticsearchWorkspace(project.getGradle()), DEFAULT_VERSION_JAVA_FILE_PATH).getPath() + ); + try (var is = new FileInputStream(versionsFilePath)) { List versionLines = IOUtils.readLines(is, "UTF-8"); return new BwcVersions(versionLines); } catch (IOException e) { @@ -190,7 +203,7 @@ private static BwcVersions resolveBwcVersions(File root) { } } - private void logGlobalBuildInfo() { + private void logGlobalBuildInfo(BuildParameterExtension buildParams) { final String osName = System.getProperty("os.name"); final String osVersion = System.getProperty("os.version"); final String osArch = System.getProperty("os.arch"); @@ -202,14 +215,14 @@ private void logGlobalBuildInfo() { LOGGER.quiet("Elasticsearch Build Hamster says Hello!"); LOGGER.quiet(" Gradle Version : " + GradleVersion.current().getVersion()); LOGGER.quiet(" OS Info : " + osName + " " + osVersion + " (" + osArch + ")"); - if (BuildParams.getIsRuntimeJavaHomeSet()) { - JvmInstallationMetadata runtimeJvm = metadataDetector.getMetadata(getJavaInstallation(BuildParams.getRuntimeJavaHome())); + if (buildParams.getIsRuntimeJavaHomeSet()) { + JvmInstallationMetadata runtimeJvm = metadataDetector.getMetadata(getJavaInstallation(buildParams.getRuntimeJavaHome().get())); final String runtimeJvmVendorDetails = runtimeJvm.getVendor().getDisplayName(); final String runtimeJvmImplementationVersion = runtimeJvm.getJvmVersion(); final String runtimeVersion = runtimeJvm.getRuntimeVersion(); final String runtimeExtraDetails = runtimeJvmVendorDetails + ", " + runtimeVersion; LOGGER.quiet(" Runtime JDK Version : " + runtimeJvmImplementationVersion + " (" + runtimeExtraDetails + ")"); - LOGGER.quiet(" Runtime java.home : " + BuildParams.getRuntimeJavaHome()); + LOGGER.quiet(" Runtime java.home : " + buildParams.getRuntimeJavaHome().get()); LOGGER.quiet(" Gradle JDK Version : " + gradleJvmImplementationVersion + " (" + gradleJvmVendorDetails + ")"); LOGGER.quiet(" Gradle java.home : " + gradleJvm.getJavaHome()); } else { @@ -220,8 +233,8 @@ private void logGlobalBuildInfo() { if (javaToolchainHome != null) { LOGGER.quiet(" JAVA_TOOLCHAIN_HOME : " + javaToolchainHome); } - LOGGER.quiet(" Random Testing Seed : " + BuildParams.getTestSeed()); - LOGGER.quiet(" In FIPS 140 mode : " + BuildParams.isInFipsJvm()); + LOGGER.quiet(" Random Testing Seed : " + buildParams.getTestSeed()); + LOGGER.quiet(" In FIPS 140 mode : " + 
buildParams.getInFipsJvm()); LOGGER.quiet("======================================="); } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ForbiddenApisPrecommitPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ForbiddenApisPrecommitPlugin.java index f71c86b19a14..f1ec236efe64 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ForbiddenApisPrecommitPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ForbiddenApisPrecommitPlugin.java @@ -11,7 +11,7 @@ import org.elasticsearch.gradle.internal.ExportElasticsearchBuildResourcesTask; import org.elasticsearch.gradle.internal.conventions.precommit.PrecommitPlugin; -import org.elasticsearch.gradle.internal.info.BuildParams; +import org.elasticsearch.gradle.internal.info.BuildParameterExtension; import org.gradle.api.Project; import org.gradle.api.Task; import org.gradle.api.plugins.JavaBasePlugin; @@ -30,7 +30,7 @@ public class ForbiddenApisPrecommitPlugin extends PrecommitPlugin { @Override public TaskProvider createTask(Project project) { project.getPluginManager().apply(JavaBasePlugin.class); - + var buildParams = project.getRootProject().getExtensions().getByType(BuildParameterExtension.class); // Create a convenience task for all checks (this does not conflict with extension, as it has higher priority in DSL): var forbiddenTask = project.getTasks() .register(FORBIDDEN_APIS_TASK_NAME, task -> { task.setDescription("Runs forbidden-apis checks."); }); @@ -57,7 +57,7 @@ public TaskProvider createTask(Project project) { t.setClassesDirs(sourceSet.getOutput().getClassesDirs()); t.dependsOn(resourcesTask); t.setClasspath(sourceSet.getRuntimeClasspath().plus(sourceSet.getCompileClasspath())); - t.setTargetCompatibility(BuildParams.getMinimumRuntimeVersion().getMajorVersion()); + t.setTargetCompatibility(buildParams.getMinimumRuntimeVersion().getMajorVersion()); t.getBundledSignatures().set(BUNDLED_SIGNATURE_DEFAULTS); t.setSignaturesFiles( project.files( diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditPrecommitPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditPrecommitPlugin.java index 80cece6074ab..f70e25a57e33 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditPrecommitPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditPrecommitPlugin.java @@ -12,17 +12,19 @@ import org.elasticsearch.gradle.dependencies.CompileOnlyResolvePlugin; import org.elasticsearch.gradle.internal.ExportElasticsearchBuildResourcesTask; import org.elasticsearch.gradle.internal.conventions.precommit.PrecommitPlugin; -import org.elasticsearch.gradle.internal.info.BuildParams; +import org.elasticsearch.gradle.internal.info.BuildParameterExtension; import org.gradle.api.Project; import org.gradle.api.Task; import org.gradle.api.artifacts.Configuration; import org.gradle.api.artifacts.component.ModuleComponentIdentifier; +import org.gradle.api.provider.Property; import org.gradle.api.tasks.TaskProvider; import java.io.File; import java.nio.file.Path; import static org.elasticsearch.gradle.internal.util.DependenciesUtils.createFileCollectionFromNonTransitiveArtifactsView; +import static org.elasticsearch.gradle.internal.util.ParamsUtils.loadBuildParams; public class 
ThirdPartyAuditPrecommitPlugin extends PrecommitPlugin { @@ -31,10 +33,14 @@ public class ThirdPartyAuditPrecommitPlugin extends PrecommitPlugin { @Override public TaskProvider createTask(Project project) { + project.getRootProject().getPlugins().apply(CompileOnlyResolvePlugin.class); + Property buildParams = loadBuildParams(project); + project.getPlugins().apply(CompileOnlyResolvePlugin.class); project.getConfigurations().create("forbiddenApisCliJar"); project.getDependencies().add("forbiddenApisCliJar", "de.thetaphi:forbiddenapis:3.6"); Configuration jdkJarHellConfig = project.getConfigurations().create(JDK_JAR_HELL_CONFIG_NAME); + if (project.getPath().equals(LIBS_ELASTICSEARCH_CORE_PROJECT_PATH) == false) { // Internal projects are not all plugins, so make sure the check is available // we are not doing this for this project itself to avoid jar hell with itself @@ -66,9 +72,12 @@ public TaskProvider createTask(Project project) { && ((ModuleComponentIdentifier) identifier).getGroup().startsWith("org.elasticsearch") == false ) ); + if (buildParams.get().getIsRuntimeJavaHomeSet()) { + t.getRuntimeJavaVersion().set(buildParams.get().getRuntimeJavaVersion()); + } t.dependsOn(resourcesTask); - t.getTargetCompatibility().set(project.provider(BuildParams::getRuntimeJavaVersion)); - t.getJavaHome().set(project.provider(BuildParams::getRuntimeJavaHome).map(File::getPath)); + t.getTargetCompatibility().set(buildParams.flatMap(params -> params.getRuntimeJavaVersion())); + t.getJavaHome().set(buildParams.flatMap(params -> params.getRuntimeJavaHome()).map(File::getPath)); t.setSignatureFile(resourcesDir.resolve("forbidden/third-party-audit.txt").toFile()); t.getJdkJarHellClasspath().from(jdkJarHellConfig); t.getForbiddenAPIsClasspath().from(project.getConfigurations().getByName("forbiddenApisCliJar").plus(compileOnly)); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditTask.java index 7afee8acdd4d..442797775de2 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditTask.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditTask.java @@ -13,7 +13,6 @@ import org.apache.commons.io.output.NullOutputStream; import org.elasticsearch.gradle.OS; import org.elasticsearch.gradle.VersionProperties; -import org.elasticsearch.gradle.internal.info.BuildParams; import org.gradle.api.DefaultTask; import org.gradle.api.JavaVersion; import org.gradle.api.file.ArchiveOperations; @@ -194,6 +193,10 @@ public Set getMissingClassExcludes() { @SkipWhenEmpty public abstract ConfigurableFileCollection getJarsToScan(); + @Input + @Optional + public abstract Property getRuntimeJavaVersion(); + @Classpath public FileCollection getClasspath() { return classpath; @@ -371,14 +374,10 @@ private String runForbiddenAPIsCli() throws IOException { /** Returns true iff the build Java version is the same as the given version. 
 */
     private boolean isJavaVersion(JavaVersion version) {
-        if (BuildParams.getIsRuntimeJavaHomeSet()) {
-            if (version.equals(BuildParams.getRuntimeJavaVersion())) {
-                return true;
-            }
-        } else if (version.getMajorVersion().equals(VersionProperties.getBundledJdkMajorVersion())) {
-            return true;
+        if (getRuntimeJavaVersion().isPresent()) {
+            return getRuntimeJavaVersion().get().equals(version);
         }
-        return false;
+        return version.getMajorVersion().equals(VersionProperties.getBundledJdkMajorVersion());
     }
 
     private Set<String> runJdkJarHellCheck() throws IOException {
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/snyk/GenerateSnykDependencyGraph.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/snyk/GenerateSnykDependencyGraph.java
index 31c6b503d732..b19c1207d56f 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/snyk/GenerateSnykDependencyGraph.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/snyk/GenerateSnykDependencyGraph.java
@@ -11,7 +11,6 @@
 
 import groovy.json.JsonOutput;
 
-import org.elasticsearch.gradle.internal.info.BuildParams;
 import org.gradle.api.DefaultTask;
 import org.gradle.api.GradleException;
 import org.gradle.api.artifacts.Configuration;
@@ -118,7 +117,7 @@ private Map<String, List<String>> projectAttributesData() {
     }
 
     private Object buildTargetData() {
-        return Map.of("remoteUrl", remoteUrl.get(), "branch", BuildParams.getGitRevision());
+        return Map.of("remoteUrl", remoteUrl.get(), "branch", getGitRevision().get());
     }
 
     @InputFiles
@@ -160,4 +159,13 @@ public Property<String> getRemoteUrl() {
     public Property<String> getTargetReference() {
         return targetReference;
     }
+
+    // Dedicated backing property; returning targetReference here would let the plugin's
+    // gitRevision wiring below clobber the target reference.
+    private final Property<String> gitRevision = getProject().getObjects().property(String.class);
+
+    @Input
+    public Property<String> getGitRevision() {
+        return gitRevision;
+    }
 }
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/snyk/SnykDependencyMonitoringGradlePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/snyk/SnykDependencyMonitoringGradlePlugin.java
index b3e3d7f7c004..fa10daf8dfaa 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/snyk/SnykDependencyMonitoringGradlePlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/snyk/SnykDependencyMonitoringGradlePlugin.java
@@ -10,17 +10,22 @@
 
 package org.elasticsearch.gradle.internal.snyk;
 
 import org.elasticsearch.gradle.internal.conventions.info.GitInfo;
+import org.elasticsearch.gradle.internal.info.BuildParameterExtension;
+import org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin;
 import org.gradle.api.Plugin;
 import org.gradle.api.Project;
 import org.gradle.api.artifacts.Configuration;
 import org.gradle.api.file.ProjectLayout;
 import org.gradle.api.plugins.JavaPlugin;
 import org.gradle.api.plugins.JavaPluginExtension;
+import org.gradle.api.provider.Property;
 import org.gradle.api.provider.ProviderFactory;
 import org.gradle.api.tasks.SourceSet;
 
 import javax.inject.Inject;
 
+import static org.elasticsearch.gradle.internal.util.ParamsUtils.loadBuildParams;
+
 public class SnykDependencyMonitoringGradlePlugin implements Plugin<Project> {
 
     public static final String UPLOAD_TASK_NAME = "uploadSnykDependencyGraph";
@@ -35,10 +40,14 @@ public SnykDependencyMonitoringGradlePlugin(ProjectLayout projectLayout, Provide
 
     @Override
     public void apply(Project project) {
+        project.getRootProject().getPlugins().apply(GlobalBuildInfoPlugin.class);
+        Property<BuildParameterExtension> buildParams = loadBuildParams(project);
+
         var generateTaskProvider = project.getTasks()
.register("generateSnykDependencyGraph", GenerateSnykDependencyGraph.class, generateSnykDependencyGraph -> { generateSnykDependencyGraph.getProjectPath().set(project.getPath()); generateSnykDependencyGraph.getProjectName().set(project.getName()); + generateSnykDependencyGraph.getGitRevision().set(buildParams.get().getGitRevision()); String projectVersion = project.getVersion().toString(); generateSnykDependencyGraph.getVersion().set(projectVersion); generateSnykDependencyGraph.getGradleVersion().set(project.getGradle().getGradleVersion()); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java index 8e7884888b63..e8d2bbd93ff2 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java @@ -16,11 +16,11 @@ import org.elasticsearch.gradle.ElasticsearchDistributionType; import org.elasticsearch.gradle.Version; import org.elasticsearch.gradle.VersionProperties; +import org.elasticsearch.gradle.internal.BwcVersions; import org.elasticsearch.gradle.internal.InternalDistributionDownloadPlugin; import org.elasticsearch.gradle.internal.JdkDownloadPlugin; import org.elasticsearch.gradle.internal.docker.DockerSupportPlugin; import org.elasticsearch.gradle.internal.docker.DockerSupportService; -import org.elasticsearch.gradle.internal.info.BuildParams; import org.elasticsearch.gradle.test.SystemPropertyCommandLineArgumentProvider; import org.elasticsearch.gradle.util.GradleUtils; import org.gradle.api.Action; @@ -54,6 +54,7 @@ import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER_UBI; import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER_WOLFI; import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.RPM; +import static org.elasticsearch.gradle.internal.util.ParamsUtils.loadBuildParams; /** * This class defines gradle tasks for testing our various distribution artifacts. 
@@ -72,6 +73,7 @@ public void apply(Project project) { project.getPlugins().apply(InternalDistributionDownloadPlugin.class); project.getPlugins().apply(JdkDownloadPlugin.class); project.getPluginManager().apply("elasticsearch.java"); + var buildParams = loadBuildParams(project).get(); Provider dockerSupport = GradleUtils.getBuildService( project.getGradle().getSharedServices(), @@ -84,7 +86,7 @@ public void apply(Project project) { List testDistributions = configureDistributions(project); Map> lifecycleTasks = lifecycleTasks(project, "destructiveDistroTest"); - Map> versionTasks = versionTasks(project, "destructiveDistroUpgradeTest"); + Map> versionTasks = versionTasks(project, "destructiveDistroUpgradeTest", buildParams.getBwcVersions()); TaskProvider destructiveDistroTest = project.getTasks().register("destructiveDistroTest"); Configuration examplePlugin = configureExamplePlugin(project); @@ -115,7 +117,7 @@ public void apply(Project project) { lifecycleTask.configure(t -> t.dependsOn(destructiveTask)); if ((type == DEB || type == RPM) && distribution.getBundledJdk()) { - for (Version version : BuildParams.getBwcVersions().getIndexCompatible()) { + for (Version version : buildParams.getBwcVersions().getIndexCompatible()) { final ElasticsearchDistribution bwcDistro; if (version.equals(Version.fromString(distribution.getVersion()))) { // this is the same as the distribution we are testing @@ -156,10 +158,10 @@ private static Map> lifecycleTask return lifecyleTasks; } - private static Map> versionTasks(Project project, String taskPrefix) { + private static Map> versionTasks(Project project, String taskPrefix, BwcVersions bwcVersions) { Map> versionTasks = new HashMap<>(); - for (Version version : BuildParams.getBwcVersions().getIndexCompatible()) { + for (Version version : bwcVersions.getIndexCompatible()) { versionTasks.put(version.toString(), project.getTasks().register(taskPrefix + ".v" + version)); } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/InternalClusterTestPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/InternalClusterTestPlugin.java index 3619c9c1ec76..e13c2544ae9c 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/InternalClusterTestPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/InternalClusterTestPlugin.java @@ -9,7 +9,7 @@ package org.elasticsearch.gradle.internal.test; -import org.elasticsearch.gradle.internal.info.BuildParams; +import org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin; import org.elasticsearch.gradle.util.GradleUtils; import org.gradle.api.JavaVersion; import org.gradle.api.Plugin; @@ -18,16 +18,21 @@ import org.gradle.api.tasks.TaskProvider; import org.gradle.api.tasks.testing.Test; +import static org.elasticsearch.gradle.internal.util.ParamsUtils.loadBuildParams; + public class InternalClusterTestPlugin implements Plugin { public static final String SOURCE_SET_NAME = "internalClusterTest"; @Override public void apply(Project project) { + project.getRootProject().getPlugins().apply(GlobalBuildInfoPlugin.class); + var buildParams = loadBuildParams(project).get(); + TaskProvider internalClusterTest = GradleUtils.addTestSourceSet(project, SOURCE_SET_NAME); internalClusterTest.configure(task -> { // Set GC options to mirror defaults in jvm.options - if (BuildParams.getRuntimeJavaVersion().compareTo(JavaVersion.VERSION_14) < 0) { + if 
(buildParams.getRuntimeJavaVersion().get().compareTo(JavaVersion.VERSION_14) < 0) { task.jvmArgs("-XX:+UseConcMarkSweepGC", "-XX:CMSInitiatingOccupancyFraction=75", "-XX:+UseCMSInitiatingOccupancyOnly"); } else { task.jvmArgs("-XX:+UseG1GC"); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/MutedTestPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/MutedTestPlugin.java index fddddbd14d3a..c13a5f0e4d30 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/MutedTestPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/MutedTestPlugin.java @@ -9,7 +9,7 @@ package org.elasticsearch.gradle.internal.test; -import org.elasticsearch.gradle.internal.info.BuildParams; +import org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin; import org.gradle.api.Plugin; import org.gradle.api.Project; import org.gradle.api.file.RegularFile; @@ -19,6 +19,8 @@ import java.util.Arrays; import java.util.List; +import static org.elasticsearch.gradle.internal.util.ParamsUtils.loadBuildParams; + public class MutedTestPlugin implements Plugin { private static final String ADDITIONAL_FILES_PROPERTY = "org.elasticsearch.additional.muted.tests"; @@ -32,6 +34,9 @@ public void apply(Project project) { .map(p -> project.getRootProject().getLayout().getProjectDirectory().file(p)) .toList(); + project.getRootProject().getPlugins().apply(GlobalBuildInfoPlugin.class); + var buildParams = loadBuildParams(project).get(); + Provider mutedTestsProvider = project.getGradle() .getSharedServices() .registerIfAbsent("mutedTests", MutedTestsBuildService.class, spec -> { @@ -46,7 +51,7 @@ public void apply(Project project) { } // Don't fail when all tests are ignored when running in CI - filter.setFailOnNoMatchingTests(BuildParams.isCi() == false); + filter.setFailOnNoMatchingTests(buildParams.isCi() == false); }); }); } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/TestWithSslPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/TestWithSslPlugin.java index 564465fbb255..68711881b02f 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/TestWithSslPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/TestWithSslPlugin.java @@ -11,7 +11,7 @@ import org.elasticsearch.gradle.internal.ExportElasticsearchBuildResourcesTask; import org.elasticsearch.gradle.internal.conventions.util.Util; -import org.elasticsearch.gradle.internal.info.BuildParams; +import org.elasticsearch.gradle.internal.info.BuildParameterExtension; import org.elasticsearch.gradle.internal.precommit.FilePermissionsPrecommitPlugin; import org.elasticsearch.gradle.internal.precommit.ForbiddenPatternsPrecommitPlugin; import org.elasticsearch.gradle.internal.precommit.ForbiddenPatternsTask; @@ -35,6 +35,7 @@ public class TestWithSslPlugin implements Plugin { @Override public void apply(Project project) { File keyStoreDir = new File(project.getBuildDir(), "keystore"); + BuildParameterExtension buildParams = project.getRootProject().getExtensions().getByType(BuildParameterExtension.class); TaskProvider exportKeyStore = project.getTasks() .register("copyTestCertificates", ExportElasticsearchBuildResourcesTask.class, (t) -> { t.copy("test/ssl/test-client.crt"); @@ -87,7 +88,7 @@ public void apply(Project project) { .getExtensions() .getByName(TestClustersPlugin.EXTENSION_NAME); 
clusters.configureEach(c -> { - if (BuildParams.isInFipsJvm()) { + if (buildParams.getInFipsJvm()) { c.setting("xpack.security.transport.ssl.key", "test-node.key"); c.keystore("xpack.security.transport.ssl.secure_key_passphrase", "test-node-key-password"); c.setting("xpack.security.transport.ssl.certificate", "test-node.crt"); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java index 777a6d931e50..548791b9496c 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java @@ -20,7 +20,6 @@ import org.elasticsearch.gradle.distribution.ElasticsearchDistributionTypes; import org.elasticsearch.gradle.internal.ElasticsearchJavaBasePlugin; import org.elasticsearch.gradle.internal.InternalDistributionDownloadPlugin; -import org.elasticsearch.gradle.internal.info.BuildParams; import org.elasticsearch.gradle.internal.test.ErrorReportingTestListener; import org.elasticsearch.gradle.internal.test.HistoricalFeaturesMetadataPlugin; import org.elasticsearch.gradle.plugin.BasePluginBuildPlugin; @@ -58,6 +57,8 @@ import javax.inject.Inject; +import static org.elasticsearch.gradle.internal.util.ParamsUtils.loadBuildParams; + /** * Base plugin used for wiring up build tasks to REST testing tasks using new JUnit rule-based test clusters framework. */ @@ -92,6 +93,7 @@ public RestTestBasePlugin(ProviderFactory providerFactory) { public void apply(Project project) { project.getPluginManager().apply(ElasticsearchJavaBasePlugin.class); project.getPluginManager().apply(InternalDistributionDownloadPlugin.class); + var bwcVersions = loadBuildParams(project).get().getBwcVersions(); // Register integ-test and default distributions ElasticsearchDistribution defaultDistro = createDistribution( @@ -176,7 +178,7 @@ public void apply(Project project) { task.systemProperty("tests.system_call_filter", "false"); // Pass minimum wire compatible version which is used by upgrade tests - task.systemProperty(MINIMUM_WIRE_COMPATIBLE_VERSION_SYSPROP, BuildParams.getBwcVersions().getMinimumWireCompatibleVersion()); + task.systemProperty(MINIMUM_WIRE_COMPATIBLE_VERSION_SYSPROP, bwcVersions.getMinimumWireCompatibleVersion()); // Register plugins and modules as task inputs and pass paths as system properties to tests var modulePath = project.getObjects().fileCollection().from(modulesConfiguration); @@ -223,7 +225,7 @@ public Void call(Object... args) { } Version version = (Version) args[0]; - boolean isReleased = BuildParams.getBwcVersions().unreleasedInfo(version) == null; + boolean isReleased = bwcVersions.unreleasedInfo(version) == null; String versionString = version.toString(); ElasticsearchDistribution bwcDistro = createDistribution(project, "bwc_" + versionString, versionString); @@ -235,9 +237,9 @@ public Void call(Object... 
args) { providerFactory.provider(() -> bwcDistro.getExtracted().getSingleFile().getPath()) ); - if (version.getMajor() > 0 && version.before(BuildParams.getBwcVersions().getMinimumWireCompatibleVersion())) { + if (version.getMajor() > 0 && version.before(bwcVersions.getMinimumWireCompatibleVersion())) { // If we are upgrade testing older versions we also need to upgrade to 7.last - this.call(BuildParams.getBwcVersions().getMinimumWireCompatibleVersion()); + this.call(bwcVersions.getMinimumWireCompatibleVersion()); } return null; } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/AbstractYamlRestCompatTestPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/AbstractYamlRestCompatTestPlugin.java index fe305b8b46cf..61dea47eb15c 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/AbstractYamlRestCompatTestPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/AbstractYamlRestCompatTestPlugin.java @@ -11,7 +11,8 @@ import org.elasticsearch.gradle.Version; import org.elasticsearch.gradle.internal.ElasticsearchJavaBasePlugin; -import org.elasticsearch.gradle.internal.info.BuildParams; +import org.elasticsearch.gradle.internal.info.BuildParameterExtension; +import org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin; import org.elasticsearch.gradle.internal.test.rest.CopyRestApiTask; import org.elasticsearch.gradle.internal.test.rest.CopyRestTestsTask; import org.elasticsearch.gradle.internal.test.rest.LegacyYamlRestTestPlugin; @@ -47,6 +48,7 @@ import javax.inject.Inject; import static org.elasticsearch.gradle.internal.test.rest.RestTestUtil.setupYamlRestTestDependenciesDefaults; +import static org.elasticsearch.gradle.internal.util.ParamsUtils.loadBuildParams; /** * Apply this plugin to run the YAML based REST tests from a prior major version against this version's cluster. 
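
The BwcVersions instance threaded through the plugins above is no longer computed eagerly: GlobalBuildInfoPlugin wraps the expensive parse of Version.java in a provider that memoizes its result through an AtomicReference, so the work runs at most once and only if something asks for it. The same idiom, reduced to a runnable standalone sketch (MemoDemo and its sample values are illustrative, not from this change):

```java
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Supplier;

// Illustrative stand-in for the memoization idiom behind GlobalBuildInfoPlugin's
// bwcVersionsProvider: the expensive supplier runs on the first call only.
public class MemoDemo {
    static <T> Supplier<T> memoize(Supplier<T> expensive) {
        AtomicReference<T> cache = new AtomicReference<>();
        // updateAndGet computes the value once and returns the cached value afterwards.
        return () -> cache.updateAndGet(val -> val == null ? expensive.get() : val);
    }

    public static void main(String[] args) {
        Supplier<String> bwcVersions = memoize(() -> {
            System.out.println("parsing Version.java ...");
            return "8.16.1, 8.17.0, 9.0.0";
        });
        bwcVersions.get(); // triggers the parse
        bwcVersions.get(); // served from the cache, no second parse
    }
}
```
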
@@ -74,6 +76,8 @@ public AbstractYamlRestCompatTestPlugin(ProjectLayout projectLayout, FileOperati @Override public void apply(Project project) { + project.getRootProject().getRootProject().getPlugins().apply(GlobalBuildInfoPlugin.class); + BuildParameterExtension buildParams = loadBuildParams(project).get(); final Path compatRestResourcesDir = Path.of("restResources").resolve("compat"); final Path compatSpecsDir = compatRestResourcesDir.resolve("yamlSpecs"); @@ -91,14 +95,14 @@ public void apply(Project project) { GradleUtils.extendSourceSet(project, YamlRestTestPlugin.YAML_REST_TEST, SOURCE_SET_NAME); // determine the previous rest compatibility version and BWC project path - int currentMajor = BuildParams.getBwcVersions().getCurrentVersion().getMajor(); - Version lastMinor = BuildParams.getBwcVersions() + int currentMajor = buildParams.getBwcVersions().getCurrentVersion().getMajor(); + Version lastMinor = buildParams.getBwcVersions() .getUnreleased() .stream() .filter(v -> v.getMajor() == currentMajor - 1) .min(Comparator.reverseOrder()) .get(); - String lastMinorProjectPath = BuildParams.getBwcVersions().unreleasedInfo(lastMinor).gradleProjectPath(); + String lastMinorProjectPath = buildParams.getBwcVersions().unreleasedInfo(lastMinor).gradleProjectPath(); // copy compatible rest specs Configuration bwcMinorConfig = project.getConfigurations().create(BWC_MINOR_CONFIG_NAME); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixturesDeployPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixturesDeployPlugin.java index 53fb4c61e151..a934164d11af 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixturesDeployPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixturesDeployPlugin.java @@ -12,7 +12,7 @@ import org.apache.commons.lang.StringUtils; import org.elasticsearch.gradle.Architecture; import org.elasticsearch.gradle.internal.docker.DockerBuildTask; -import org.elasticsearch.gradle.internal.info.BuildParams; +import org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin; import org.gradle.api.NamedDomainObjectContainer; import org.gradle.api.Plugin; import org.gradle.api.Project; @@ -20,6 +20,8 @@ import java.util.Arrays; import java.util.List; +import static org.elasticsearch.gradle.internal.util.ParamsUtils.loadBuildParams; + public class TestFixturesDeployPlugin implements Plugin { public static final String DEPLOY_FIXTURE_TASK_NAME = "deployFixtureDockerImages"; @@ -27,13 +29,19 @@ public class TestFixturesDeployPlugin implements Plugin { @Override public void apply(Project project) { + project.getRootProject().getPlugins().apply(GlobalBuildInfoPlugin.class); + var buildParams = loadBuildParams(project).get(); NamedDomainObjectContainer fixtures = project.container(TestFixtureDeployment.class); project.getExtensions().add("dockerFixtures", fixtures); - registerDeployTaskPerFixture(project, fixtures); + registerDeployTaskPerFixture(project, fixtures, buildParams.isCi()); project.getTasks().register(DEPLOY_FIXTURE_TASK_NAME, task -> task.dependsOn(project.getTasks().withType(DockerBuildTask.class))); } - private static void registerDeployTaskPerFixture(Project project, NamedDomainObjectContainer fixtures) { + private static void registerDeployTaskPerFixture( + Project project, + NamedDomainObjectContainer fixtures, + boolean isCi + ) { fixtures.all( fixture -> project.getTasks() 
.register("deploy" + StringUtils.capitalize(fixture.getName()) + "DockerImage", DockerBuildTask.class, task -> { @@ -42,12 +50,12 @@ private static void registerDeployTaskPerFixture(Project project, NamedDomainObj if (baseImages.isEmpty() == false) { task.setBaseImages(baseImages.toArray(new String[baseImages.size()])); } - task.setNoCache(BuildParams.isCi()); + task.setNoCache(isCi); task.setTags( new String[] { resolveTargetDockerRegistry(fixture) + "/" + fixture.getName() + "-fixture:" + fixture.getVersion().get() } ); - task.getPush().set(BuildParams.isCi()); + task.getPush().set(isCi); task.getPlatforms().addAll(Arrays.stream(Architecture.values()).map(a -> a.dockerPlatform).toList()); task.setGroup("Deploy TestFixtures"); task.setDescription("Deploys the " + fixture.getName() + " test fixture"); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixturesPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixturesPlugin.java index 504b081fd505..ab28a66d9306 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixturesPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixturesPlugin.java @@ -17,7 +17,7 @@ import org.elasticsearch.gradle.internal.docker.DockerSupportPlugin; import org.elasticsearch.gradle.internal.docker.DockerSupportService; -import org.elasticsearch.gradle.internal.info.BuildParams; +import org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin; import org.elasticsearch.gradle.test.SystemPropertyCommandLineArgumentProvider; import org.elasticsearch.gradle.util.GradleUtils; import org.gradle.api.Action; @@ -47,6 +47,8 @@ import javax.inject.Inject; +import static org.elasticsearch.gradle.internal.util.ParamsUtils.loadBuildParams; + public class TestFixturesPlugin implements Plugin { private static final Logger LOGGER = Logging.getLogger(TestFixturesPlugin.class); @@ -68,6 +70,8 @@ protected FileSystemOperations getFileSystemOperations() { @Override public void apply(Project project) { project.getRootProject().getPluginManager().apply(DockerSupportPlugin.class); + project.getRootProject().getPlugins().apply(GlobalBuildInfoPlugin.class); + var buildParams = loadBuildParams(project).get(); TaskContainer tasks = project.getTasks(); Provider dockerComposeThrottle = project.getGradle() @@ -127,7 +131,7 @@ public void apply(Project project) { tasks.withType(ComposeUp.class).named("composeUp").configure(t -> { // Avoid running docker-compose tasks in parallel in CI due to some issues on certain Linux distributions - if (BuildParams.isCi()) { + if (buildParams.isCi()) { t.usesService(dockerComposeThrottle); t.usesService(dockerSupport); } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/util/CiUtils.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/util/CiUtils.java new file mode 100644 index 000000000000..1b019a6cbd3e --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/util/CiUtils.java @@ -0,0 +1,18 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.gradle.internal.util; + +public class CiUtils { + + static String safeName(String input) { + return input.replaceAll("[^a-zA-Z0-9_\\-\\.]+", " ").trim().replaceAll(" ", "_").toLowerCase(); + } + +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/util/ParamsUtils.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/util/ParamsUtils.java new file mode 100644 index 000000000000..0afe654bc5fb --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/util/ParamsUtils.java @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.gradle.internal.util; + +import org.elasticsearch.gradle.internal.info.BuildParameterExtension; +import org.elasticsearch.gradle.internal.info.BuildParameterService; +import org.gradle.api.Project; +import org.gradle.api.provider.Property; +import org.gradle.api.services.BuildServiceRegistration; + +public class ParamsUtils { + + public static Property loadBuildParams(Project project) { + BuildServiceRegistration buildParamsRegistrations = (BuildServiceRegistration< + BuildParameterService, + BuildParameterService.Params>) project.getGradle().getSharedServices().getRegistrations().getByName("buildParams"); + Property buildParams = buildParamsRegistrations.getParameters().getBuildParams(); + return buildParams; + } + +} diff --git a/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt b/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt index 5388f942be8d..a9da7995c2b3 100644 --- a/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt +++ b/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt @@ -119,7 +119,7 @@ java.time.zone.ZoneRules#getStandardOffset(java.time.Instant) java.time.zone.ZoneRules#getDaylightSavings(java.time.Instant) java.time.zone.ZoneRules#isDaylightSavings(java.time.Instant) -@defaultMessage Use logger methods with non-Object parameter +@defaultMessage The first parameter to a log4j log statement should be a String, a log4j Supplier (not java.util.function.Supplier), or another object that log4j supports. 
org.apache.logging.log4j.Logger#trace(java.lang.Object) org.apache.logging.log4j.Logger#trace(java.lang.Object, java.lang.Throwable) org.apache.logging.log4j.Logger#debug(java.lang.Object) diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/BwcVersionsSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/BwcVersionsSpec.groovy index c5b7a44a19d3..9c7d20d84a67 100644 --- a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/BwcVersionsSpec.groovy +++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/BwcVersionsSpec.groovy @@ -17,232 +17,201 @@ import org.elasticsearch.gradle.internal.BwcVersions.UnreleasedVersionInfo class BwcVersionsSpec extends Specification { List versionLines = [] - def "current version is next major with last minor staged"() { - given: - addVersion('7.14.0', '8.9.0') - addVersion('7.14.1', '8.9.0') - addVersion('7.14.2', '8.9.0') - addVersion('7.15.0', '8.9.0') - addVersion('7.15.1', '8.9.0') - addVersion('7.15.2', '8.9.0') - addVersion('7.16.0', '8.10.0') - addVersion('7.16.1', '8.10.0') - addVersion('7.16.2', '8.10.0') - addVersion('7.17.0', '8.10.0') - addVersion('8.0.0', '9.0.0') - addVersion('8.1.0', '9.0.0') - - when: - def bwc = new BwcVersions(versionLines, v('8.1.0')) - def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } - - then: - unreleased == [ - (v('7.16.2')): new UnreleasedVersionInfo(v('7.16.2'), '7.16', ':distribution:bwc:bugfix'), - (v('7.17.0')): new UnreleasedVersionInfo(v('7.17.0'), '7.17', ':distribution:bwc:staged'), - (v('8.0.0')): new UnreleasedVersionInfo(v('8.0.0'), '8.x', ':distribution:bwc:minor'), - (v('8.1.0')): new UnreleasedVersionInfo(v('8.1.0'), 'main', ':distribution') - ] - bwc.wireCompatible == [v('7.17.0'), v('8.0.0'), v('8.1.0')] - bwc.indexCompatible == [v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.16.1'), v('7.16.2'), v('7.17.0'), v('8.0.0'), v('8.1.0')] - bwc.minimumWireCompatibleVersion == v('7.17.0') - } - def "current version is next minor with next major and last minor both staged"() { given: - addVersion('7.14.0', '8.9.0') - addVersion('7.14.1', '8.9.0') - addVersion('7.14.2', '8.9.0') - addVersion('7.15.0', '8.9.0') - addVersion('7.15.1', '8.9.0') - addVersion('7.15.2', '8.9.0') - addVersion('7.16.0', '8.10.0') - addVersion('7.16.1', '8.10.0') - addVersion('7.17.0', '8.10.0') - addVersion('8.0.0', '9.0.0') - addVersion('8.1.0', '9.1.0') + addVersion('8.14.0', '9.9.0') + addVersion('8.14.1', '9.9.0') + addVersion('8.14.2', '9.9.0') + addVersion('8.15.0', '9.9.0') + addVersion('8.15.1', '9.9.0') + addVersion('8.15.2', '9.9.0') + addVersion('8.16.0', '9.10.0') + addVersion('8.16.1', '9.10.0') + addVersion('8.17.0', '9.10.0') + addVersion('9.0.0', '10.0.0') + addVersion('9.1.0', '10.1.0') when: - def bwc = new BwcVersions(versionLines, v('8.1.0')) + def bwc = new BwcVersions(versionLines, v('9.1.0')) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('7.16.1')): new UnreleasedVersionInfo(v('7.16.1'), '7.16', ':distribution:bwc:bugfix'), - (v('7.17.0')): new UnreleasedVersionInfo(v('7.17.0'), '7.17', ':distribution:bwc:staged'), - (v('8.0.0')): new UnreleasedVersionInfo(v('8.0.0'), '8.x', ':distribution:bwc:minor'), - (v('8.1.0')): new UnreleasedVersionInfo(v('8.1.0'), 'main', ':distribution') + (v('8.16.1')): new UnreleasedVersionInfo(v('8.16.1'), '8.16', ':distribution:bwc:bugfix'), + 
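The rewritten `@defaultMessage` at the top of this hunk spells out what the forbidden `Logger#trace(java.lang.Object)`-style signatures push callers toward. A small sketch of the distinction (class and values are illustrative):

----
import java.util.Map;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

class LoggingStyle {
    private static final Logger logger = LogManager.getLogger(LoggingStyle.class);

    static void example() {
        Object state = Map.of("shard", 3);
        // Forbidden: resolves to the Object overload listed above.
        // logger.trace(state);
        // OK: a String message with log4j "{}" placeholders.
        logger.trace("recovered state [{}]", state);
    }
}
----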
(v('8.17.0')): new UnreleasedVersionInfo(v('8.17.0'), '8.17', ':distribution:bwc:staged'), + (v('9.0.0')): new UnreleasedVersionInfo(v('9.0.0'), '9.x', ':distribution:bwc:minor'), + (v('9.1.0')): new UnreleasedVersionInfo(v('9.1.0'), 'main', ':distribution') ] - bwc.wireCompatible == [v('7.17.0'), v('8.0.0'), v('8.1.0')] - bwc.indexCompatible == [v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.16.1'), v('7.17.0'), v('8.0.0'), v('8.1.0')] + bwc.wireCompatible == [v('8.17.0'), v('9.0.0'), v('9.1.0')] + bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('9.0.0'), v('9.1.0')] } def "current is next minor with upcoming minor staged"() { given: - addVersion('7.14.0', '8.9.0') - addVersion('7.14.1', '8.9.0') - addVersion('7.14.2', '8.9.0') - addVersion('7.15.0', '8.9.0') - addVersion('7.15.1', '8.9.0') - addVersion('7.15.2', '8.9.0') - addVersion('7.16.0', '8.10.0') - addVersion('7.16.1', '8.10.0') - addVersion('7.17.0', '8.10.0') - addVersion('7.17.1', '8.10.0') - addVersion('8.0.0', '9.0.0') - addVersion('8.1.0', '9.1.0') + addVersion('8.14.0', '9.9.0') + addVersion('8.14.1', '9.9.0') + addVersion('8.14.2', '9.9.0') + addVersion('8.15.0', '9.9.0') + addVersion('8.15.1', '9.9.0') + addVersion('8.15.2', '9.9.0') + addVersion('8.16.0', '9.10.0') + addVersion('8.16.1', '9.10.0') + addVersion('8.17.0', '9.10.0') + addVersion('8.17.1', '9.10.0') + addVersion('9.0.0', '10.0.0') + addVersion('9.1.0', '10.1.0') when: - def bwc = new BwcVersions(versionLines, v('8.1.0')) + def bwc = new BwcVersions(versionLines, v('9.1.0')) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('7.17.1')): new UnreleasedVersionInfo(v('7.17.1'), '7.17', ':distribution:bwc:bugfix'), - (v('8.0.0')): new UnreleasedVersionInfo(v('8.0.0'), '8.0', ':distribution:bwc:staged'), - (v('8.1.0')): new UnreleasedVersionInfo(v('8.1.0'), 'main', ':distribution') + (v('8.17.1')): new UnreleasedVersionInfo(v('8.17.1'), '8.17', ':distribution:bwc:bugfix'), + (v('9.0.0')): new UnreleasedVersionInfo(v('9.0.0'), '9.0', ':distribution:bwc:staged'), + (v('9.1.0')): new UnreleasedVersionInfo(v('9.1.0'), 'main', ':distribution') ] - bwc.wireCompatible == [v('7.17.0'), v('7.17.1'), v('8.0.0'), v('8.1.0')] - bwc.indexCompatible == [v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.16.1'), v('7.17.0'), v('7.17.1'), v('8.0.0'), v('8.1.0')] + bwc.wireCompatible == [v('8.17.0'), v('8.17.1'), v('9.0.0'), v('9.1.0')] + bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.17.1'), v('9.0.0'), v('9.1.0')] } def "current version is staged major"() { given: - addVersion('7.14.0', '8.9.0') - addVersion('7.14.1', '8.9.0') - addVersion('7.14.2', '8.9.0') - addVersion('7.15.0', '8.9.0') - addVersion('7.15.1', '8.9.0') - addVersion('7.15.2', '8.9.0') - addVersion('7.16.0', '8.10.0') - addVersion('7.16.1', '8.10.0') - addVersion('7.17.0', '8.10.0') - addVersion('7.17.1', '8.10.0') - addVersion('8.0.0', '9.0.0') + addVersion('8.14.0', '9.9.0') + addVersion('8.14.1', '9.9.0') + addVersion('8.14.2', '9.9.0') + addVersion('8.15.0', '9.9.0') + addVersion('8.15.1', '9.9.0') + addVersion('8.15.2', '9.9.0') + addVersion('8.16.0', '9.10.0') + addVersion('8.16.1', '9.10.0') + addVersion('8.17.0', '9.10.0') + addVersion('8.17.1', '9.10.0') + 
addVersion('9.0.0', '10.0.0') when: - def bwc = new BwcVersions(versionLines, v('8.0.0')) + def bwc = new BwcVersions(versionLines, v('9.0.0')) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('7.17.1')): new UnreleasedVersionInfo(v('7.17.1'), '7.17', ':distribution:bwc:bugfix'), - (v('8.0.0')): new UnreleasedVersionInfo(v('8.0.0'), 'main', ':distribution'), + (v('8.17.1')): new UnreleasedVersionInfo(v('8.17.1'), '8.17', ':distribution:bwc:bugfix'), + (v('9.0.0')): new UnreleasedVersionInfo(v('9.0.0'), 'main', ':distribution'), ] - bwc.wireCompatible == [v('7.17.0'), v('7.17.1'), v('8.0.0')] - bwc.indexCompatible == [v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.16.1'), v('7.17.0'), v('7.17.1'), v('8.0.0')] + bwc.wireCompatible == [v('8.17.0'), v('8.17.1'), v('9.0.0')] + bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.17.1'), v('9.0.0')] } def "current version is major with unreleased next minor"() { given: - addVersion('7.14.0', '8.9.0') - addVersion('7.14.1', '8.9.0') - addVersion('7.14.2', '8.9.0') - addVersion('7.15.0', '8.9.0') - addVersion('7.15.1', '8.9.0') - addVersion('7.15.2', '8.9.0') - addVersion('7.16.0', '8.10.0') - addVersion('7.16.1', '8.10.0') - addVersion('7.17.0', '8.10.0') - addVersion('8.0.0', '9.0.0') + addVersion('8.14.0', '9.9.0') + addVersion('8.14.1', '9.9.0') + addVersion('8.14.2', '9.9.0') + addVersion('8.15.0', '9.9.0') + addVersion('8.15.1', '9.9.0') + addVersion('8.15.2', '9.9.0') + addVersion('8.16.0', '9.10.0') + addVersion('8.16.1', '9.10.0') + addVersion('8.17.0', '9.10.0') + addVersion('9.0.0', '10.0.0') when: - def bwc = new BwcVersions(versionLines, v('8.0.0')) + def bwc = new BwcVersions(versionLines, v('9.0.0')) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('7.16.1')): new UnreleasedVersionInfo(v('7.16.1'), '7.16', ':distribution:bwc:bugfix'), - (v('7.17.0')): new UnreleasedVersionInfo(v('7.17.0'), '7.x', ':distribution:bwc:minor'), - (v('8.0.0')): new UnreleasedVersionInfo(v('8.0.0'), 'main', ':distribution'), + (v('8.16.1')): new UnreleasedVersionInfo(v('8.16.1'), '8.16', ':distribution:bwc:bugfix'), + (v('8.17.0')): new UnreleasedVersionInfo(v('8.17.0'), '8.x', ':distribution:bwc:minor'), + (v('9.0.0')): new UnreleasedVersionInfo(v('9.0.0'), 'main', ':distribution'), ] - bwc.wireCompatible == [v('7.17.0'), v('8.0.0')] - bwc.indexCompatible == [v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.16.1'), v('7.17.0'), v('8.0.0')] + bwc.wireCompatible == [v('8.17.0'), v('9.0.0')] + bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('9.0.0')] } def "current version is major with staged next minor"() { given: - addVersion('7.14.0', '8.9.0') - addVersion('7.14.1', '8.9.0') - addVersion('7.14.2', '8.9.0') - addVersion('7.15.0', '8.9.0') - addVersion('7.15.1', '8.9.0') - addVersion('7.15.2', '8.9.0') - addVersion('7.16.0', '8.10.0') - addVersion('7.17.0', '8.10.0') - addVersion('8.0.0', '9.0.0') + addVersion('8.14.0', '9.9.0') + addVersion('8.14.1', '9.9.0') + addVersion('8.14.2', '9.9.0') + addVersion('8.15.0', '9.9.0') + addVersion('8.15.1', '9.9.0') + addVersion('8.15.2', '9.9.0') + addVersion('8.16.0', '9.10.0') + addVersion('8.17.0', '9.10.0') + 
addVersion('9.0.0', '10.0.0') when: - def bwc = new BwcVersions(versionLines, v('8.0.0')) + def bwc = new BwcVersions(versionLines, v('9.0.0')) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('7.15.2')): new UnreleasedVersionInfo(v('7.15.2'), '7.15', ':distribution:bwc:bugfix'), - (v('7.16.0')): new UnreleasedVersionInfo(v('7.16.0'), '7.16', ':distribution:bwc:staged'), - (v('7.17.0')): new UnreleasedVersionInfo(v('7.17.0'), '7.x', ':distribution:bwc:minor'), - (v('8.0.0')): new UnreleasedVersionInfo(v('8.0.0'), 'main', ':distribution'), + (v('8.15.2')): new UnreleasedVersionInfo(v('8.15.2'), '8.15', ':distribution:bwc:bugfix'), + (v('8.16.0')): new UnreleasedVersionInfo(v('8.16.0'), '8.16', ':distribution:bwc:staged'), + (v('8.17.0')): new UnreleasedVersionInfo(v('8.17.0'), '8.x', ':distribution:bwc:minor'), + (v('9.0.0')): new UnreleasedVersionInfo(v('9.0.0'), 'main', ':distribution'), ] - bwc.wireCompatible == [v('7.17.0'), v('8.0.0')] - bwc.indexCompatible == [v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.17.0'), v('8.0.0')] + bwc.wireCompatible == [v('8.17.0'), v('9.0.0')] + bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.17.0'), v('9.0.0')] } def "current version is next bugfix"() { given: - addVersion('7.14.0', '8.9.0') - addVersion('7.14.1', '8.9.0') - addVersion('7.14.2', '8.9.0') - addVersion('7.15.0', '8.9.0') - addVersion('7.15.1', '8.9.0') - addVersion('7.15.2', '8.9.0') - addVersion('7.16.0', '8.10.0') - addVersion('7.16.1', '8.10.0') - addVersion('7.17.0', '8.10.0') - addVersion('7.17.1', '8.10.0') - addVersion('8.0.0', '9.0.0') - addVersion('8.0.1', '9.0.0') + addVersion('8.14.0', '9.9.0') + addVersion('8.14.1', '9.9.0') + addVersion('8.14.2', '9.9.0') + addVersion('8.15.0', '9.9.0') + addVersion('8.15.1', '9.9.0') + addVersion('8.15.2', '9.9.0') + addVersion('8.16.0', '9.10.0') + addVersion('8.16.1', '9.10.0') + addVersion('8.17.0', '9.10.0') + addVersion('8.17.1', '9.10.0') + addVersion('9.0.0', '10.0.0') + addVersion('9.0.1', '10.0.0') when: - def bwc = new BwcVersions(versionLines, v('8.0.1')) + def bwc = new BwcVersions(versionLines, v('9.0.1')) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('7.17.1')): new UnreleasedVersionInfo(v('7.17.1'), '7.17', ':distribution:bwc:maintenance'), - (v('8.0.1')): new UnreleasedVersionInfo(v('8.0.1'), 'main', ':distribution'), + (v('8.17.1')): new UnreleasedVersionInfo(v('8.17.1'), '8.17', ':distribution:bwc:maintenance'), + (v('9.0.1')): new UnreleasedVersionInfo(v('9.0.1'), 'main', ':distribution'), ] - bwc.wireCompatible == [v('7.17.0'), v('7.17.1'), v('8.0.0'), v('8.0.1')] - bwc.indexCompatible == [v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.16.1'), v('7.17.0'), v('7.17.1'), v('8.0.0'), v('8.0.1')] + bwc.wireCompatible == [v('8.17.0'), v('8.17.1'), v('9.0.0'), v('9.0.1')] + bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.17.1'), v('9.0.0'), v('9.0.1')] } def "current version is next minor with no staged releases"() { given: - addVersion('7.14.0', '8.9.0') - addVersion('7.14.1', '8.9.0') - addVersion('7.14.2', '8.9.0') - addVersion('7.15.0', '8.9.0') - addVersion('7.15.1', '8.9.0') - addVersion('7.15.2', '8.9.0') - addVersion('7.16.0', '8.10.0') - 
addVersion('7.16.1', '8.10.0') - addVersion('7.17.0', '8.10.0') - addVersion('7.17.1', '8.10.0') - addVersion('8.0.0', '9.0.0') - addVersion('8.0.1', '9.0.0') - addVersion('8.1.0', '9.1.0') + addVersion('8.14.0', '9.9.0') + addVersion('8.14.1', '9.9.0') + addVersion('8.14.2', '9.9.0') + addVersion('8.15.0', '9.9.0') + addVersion('8.15.1', '9.9.0') + addVersion('8.15.2', '9.9.0') + addVersion('8.16.0', '9.10.0') + addVersion('8.16.1', '9.10.0') + addVersion('8.17.0', '9.10.0') + addVersion('8.17.1', '9.10.0') + addVersion('9.0.0', '10.0.0') + addVersion('9.0.1', '10.0.0') + addVersion('9.1.0', '10.1.0') when: - def bwc = new BwcVersions(versionLines, v('8.1.0')) + def bwc = new BwcVersions(versionLines, v('9.1.0')) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('7.17.1')): new UnreleasedVersionInfo(v('7.17.1'), '7.17', ':distribution:bwc:maintenance'), - (v('8.0.1')): new UnreleasedVersionInfo(v('8.0.1'), '8.0', ':distribution:bwc:bugfix'), - (v('8.1.0')): new UnreleasedVersionInfo(v('8.1.0'), 'main', ':distribution') + (v('8.17.1')): new UnreleasedVersionInfo(v('8.17.1'), '8.17', ':distribution:bwc:maintenance'), + (v('9.0.1')): new UnreleasedVersionInfo(v('9.0.1'), '9.0', ':distribution:bwc:bugfix'), + (v('9.1.0')): new UnreleasedVersionInfo(v('9.1.0'), 'main', ':distribution') ] - bwc.wireCompatible == [v('7.17.0'), v('7.17.1'), v('8.0.0'), v('8.0.1'), v('8.1.0')] - bwc.indexCompatible == [v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.16.1'), v('7.17.0'), v('7.17.1'), v('8.0.0'), v('8.0.1'), v('8.1.0')] + bwc.wireCompatible == [v('8.17.0'), v('8.17.1'), v('9.0.0'), v('9.0.1'), v('9.1.0')] + bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.17.1'), v('9.0.0'), v('9.0.1'), v('9.1.0')] } private void addVersion(String elasticsearch, String lucene) { diff --git a/build-tools/build.gradle b/build-tools/build.gradle index 7fd01f0c3d4f..e457999fedfe 100644 --- a/build-tools/build.gradle +++ b/build-tools/build.gradle @@ -9,9 +9,6 @@ buildscript { repositories { - maven { - url 'https://jitpack.io' - } mavenCentral() } } @@ -117,9 +114,6 @@ configurations { } repositories { - maven { - url 'https://jitpack.io' - } mavenCentral() gradlePluginPortal() } diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/plugin/BasePluginBuildPlugin.java b/build-tools/src/main/java/org/elasticsearch/gradle/plugin/BasePluginBuildPlugin.java index e3adfe8d2814..b3a792b41838 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/plugin/BasePluginBuildPlugin.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/plugin/BasePluginBuildPlugin.java @@ -167,7 +167,7 @@ private static CopySpec createBundleSpec( copySpec.exclude("plugin-security.codebases"); }); bundleSpec.from( - (Callable>) () -> project.getPluginManager().hasPlugin("com.github.johnrengelman.shadow") + (Callable>) () -> project.getPluginManager().hasPlugin("com.gradleup.shadow") ? 
project.getTasks().named("shadowJar") : project.getTasks().named("jar") ); diff --git a/build-tools/src/testFixtures/groovy/org/elasticsearch/gradle/fixtures/AbstractGradleFuncTest.groovy b/build-tools/src/testFixtures/groovy/org/elasticsearch/gradle/fixtures/AbstractGradleFuncTest.groovy index d3d06b2de357..f3f8e4703eba 100644 --- a/build-tools/src/testFixtures/groovy/org/elasticsearch/gradle/fixtures/AbstractGradleFuncTest.groovy +++ b/build-tools/src/testFixtures/groovy/org/elasticsearch/gradle/fixtures/AbstractGradleFuncTest.groovy @@ -168,7 +168,6 @@ abstract class AbstractGradleFuncTest extends Specification { ${extraPlugins.collect { p -> "id '$p'" }.join('\n')} } import org.elasticsearch.gradle.Architecture - import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.BwcVersions import org.elasticsearch.gradle.Version @@ -182,7 +181,7 @@ abstract class AbstractGradleFuncTest extends Specification { ] BwcVersions versions = new BwcVersions(currentVersion, versionList) - BuildParams.init { it.setBwcVersions(provider(() -> versions)) } + buildParams.getBwcVersionsProperty().set(versions) """ } diff --git a/build.gradle b/build.gradle index 71386a37cbb0..715614c1beea 100644 --- a/build.gradle +++ b/build.gradle @@ -17,7 +17,6 @@ import org.elasticsearch.gradle.DistributionDownloadPlugin import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.internal.BaseInternalPluginBuildPlugin import org.elasticsearch.gradle.internal.ResolveAllDependencies -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.util.GradleUtils import org.gradle.plugins.ide.eclipse.model.AccessRule @@ -28,10 +27,6 @@ import static org.elasticsearch.gradle.util.GradleUtils.maybeConfigure buildscript { repositories { - maven { - url 'https://jitpack.io' - } - mavenCentral() } } @@ -143,23 +138,23 @@ tasks.register("updateCIBwcVersions") { } doLast { - writeVersions(file(".ci/bwcVersions"), filterIntermediatePatches(BuildParams.bwcVersions.indexCompatible)) - writeVersions(file(".ci/snapshotBwcVersions"), filterIntermediatePatches(BuildParams.bwcVersions.unreleasedIndexCompatible)) + writeVersions(file(".ci/bwcVersions"), filterIntermediatePatches(buildParams.bwcVersions.indexCompatible)) + writeVersions(file(".ci/snapshotBwcVersions"), filterIntermediatePatches(buildParams.bwcVersions.unreleasedIndexCompatible)) expandBwcList( ".buildkite/pipelines/intake.yml", ".buildkite/pipelines/intake.template.yml", - filterIntermediatePatches(BuildParams.bwcVersions.unreleasedIndexCompatible) + filterIntermediatePatches(buildParams.bwcVersions.unreleasedIndexCompatible) ) writeBuildkitePipeline( ".buildkite/pipelines/periodic.yml", ".buildkite/pipelines/periodic.template.yml", [ - new ListExpansion(versions: filterIntermediatePatches(BuildParams.bwcVersions.unreleasedIndexCompatible), variable: "BWC_LIST"), + new ListExpansion(versions: filterIntermediatePatches(buildParams.bwcVersions.unreleasedIndexCompatible), variable: "BWC_LIST"), ], [ new StepExpansion( templatePath: ".buildkite/pipelines/periodic.bwc.template.yml", - versions: filterIntermediatePatches(BuildParams.bwcVersions.indexCompatible), + versions: filterIntermediatePatches(buildParams.bwcVersions.indexCompatible), variable: "BWC_STEPS" ), ] @@ -169,7 +164,7 @@ tasks.register("updateCIBwcVersions") { ".buildkite/pipelines/periodic-packaging.yml", ".buildkite/pipelines/periodic-packaging.template.yml", ".buildkite/pipelines/periodic-packaging.bwc.template.yml", - 
filterIntermediatePatches(BuildParams.bwcVersions.indexCompatible) + filterIntermediatePatches(buildParams.bwcVersions.indexCompatible) ) } } @@ -191,19 +186,19 @@ tasks.register("verifyVersions") { // Fetch the metadata and parse the xml into Version instances because it's more straight forward here // rather than bwcVersion ( VersionCollection ). new URL('https://repo1.maven.org/maven2/org/elasticsearch/elasticsearch/maven-metadata.xml').openStream().withStream { s -> - BuildParams.bwcVersions.compareToAuthoritative( + buildParams.bwcVersions.compareToAuthoritative( new XmlParser().parse(s) .versioning.versions.version .collect { it.text() }.findAll { it ==~ /\d+\.\d+\.\d+/ } .collect { Version.fromString(it) } ) } - verifyCiYaml(file(".ci/bwcVersions"), filterIntermediatePatches(BuildParams.bwcVersions.indexCompatible)) - verifyCiYaml(file(".ci/snapshotBwcVersions"), BuildParams.bwcVersions.unreleasedIndexCompatible) + verifyCiYaml(file(".ci/bwcVersions"), filterIntermediatePatches(buildParams.bwcVersions.indexCompatible)) + verifyCiYaml(file(".ci/snapshotBwcVersions"), buildParams.bwcVersions.unreleasedIndexCompatible) // Make sure backport bot config file is up to date JsonNode backportConfig = new ObjectMapper().readTree(file(".backportrc.json")) - BuildParams.bwcVersions.forPreviousUnreleased { unreleasedVersion -> + buildParams.bwcVersions.forPreviousUnreleased { unreleasedVersion -> boolean valid = backportConfig.get("targetBranchChoices").elements().any { branchChoice -> if (branchChoice.isObject()) { return branchChoice.get("name").textValue() == unreleasedVersion.branch diff --git a/distribution/build.gradle b/distribution/build.gradle index 5b865b36f9e4..e3481706ef23 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -14,7 +14,6 @@ import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.internal.ConcatFilesTask import org.elasticsearch.gradle.internal.DependenciesInfoPlugin import org.elasticsearch.gradle.internal.NoticeTask -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.HistoricalFeaturesMetadataPlugin import java.nio.file.Files @@ -208,7 +207,7 @@ project.rootProject.subprojects.findAll { it.parent.path == ':modules' }.each { distro.copyModule(processDefaultOutputsTaskProvider, module) dependencies.add('featuresMetadata', module) - if (module.name.startsWith('transport-') || (BuildParams.snapshotBuild == false && module.name == 'apm')) { + if (module.name.startsWith('transport-') || (buildParams.snapshotBuild == false && module.name == 'apm')) { distro.copyModule(processIntegTestOutputsTaskProvider, module) } @@ -378,7 +377,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { exclude "**/platform/${excludePlatform}/**" } } - if (BuildParams.isSnapshotBuild()) { + if (buildParams.isSnapshotBuild()) { from(buildExternalTestModulesTaskProvider) } if (project.path.startsWith(':distribution:packages')) { diff --git a/distribution/docker/build.gradle b/distribution/docker/build.gradle index 788e836f8f04..d73f9c395f15 100644 --- a/distribution/docker/build.gradle +++ b/distribution/docker/build.gradle @@ -8,7 +8,6 @@ import org.elasticsearch.gradle.internal.docker.DockerSupportService import org.elasticsearch.gradle.internal.docker.ShellRetry import org.elasticsearch.gradle.internal.docker.TransformLog4jConfigFilter import org.elasticsearch.gradle.internal.docker.* -import org.elasticsearch.gradle.internal.info.BuildParams import 
org.elasticsearch.gradle.util.GradleUtils import org.elasticsearch.gradle.Architecture import java.nio.file.Path @@ -120,7 +119,7 @@ ext.expansions = { Architecture architecture, DockerBase base -> // the image. When developing the Docker images, it's very tedious to completely rebuild // an image for every single change. Therefore, outside of CI, we fix the // build time to midnight so that the Docker build cache is usable. - def buildDate = BuildParams.isCi() ? BuildParams.buildDate : BuildParams.buildDate.truncatedTo(ChronoUnit.DAYS).toString() + def buildDate = buildParams.isCi() ? buildParams.buildDate : buildParams.buildDate.truncatedTo(ChronoUnit.DAYS).toString() return [ 'arch' : architecture.classifier, @@ -128,7 +127,7 @@ ext.expansions = { Architecture architecture, DockerBase base -> 'bin_dir' : base == DockerBase.IRON_BANK ? 'scripts' : 'bin', 'build_date' : buildDate, 'config_dir' : base == DockerBase.IRON_BANK ? 'scripts' : 'config', - 'git_revision' : BuildParams.gitRevision, + 'git_revision' : buildParams.gitRevision, 'license' : base == DockerBase.IRON_BANK ? 'Elastic License 2.0' : 'Elastic-License-2.0', 'package_manager' : base.packageManager, 'docker_base' : base.name().toLowerCase(), @@ -390,7 +389,7 @@ void addBuildDockerImageTask(Architecture architecture, DockerBase base) { dockerContext.fileProvider(transformTask.map { Sync task -> task.getDestinationDir() }) - noCache = BuildParams.isCi() + noCache = buildParams.isCi() tags = generateTags(base, architecture) platforms.add(architecture.dockerPlatform) @@ -485,7 +484,7 @@ void addBuildEssDockerImageTask(Architecture architecture) { dockerContext.fileProvider(buildContextTask.map { it.getDestinationDir() }) - noCache = BuildParams.isCi() + noCache = buildParams.isCi() baseImages = [] tags = generateTags(dockerBase, architecture) platforms.add(architecture.dockerPlatform) diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle index e08f16c14ab8..918980fea616 100644 --- a/distribution/packages/build.gradle +++ b/distribution/packages/build.gradle @@ -301,7 +301,7 @@ ospackage { url 'https://www.elastic.co/' // signing setup - if (project.hasProperty('signing.password') && BuildParams.isSnapshotBuild() == false) { + if (project.hasProperty('signing.password') && buildParams.isSnapshotBuild() == false) { signingKeyId = project.hasProperty('signing.keyId') ? project.property('signing.keyId') : 'D88E42B4' signingKeyPassphrase = project.property('signing.password') signingKeyRingFile = project.hasProperty('signing.secretKeyRingFile') ? diff --git a/docs/build.gradle b/docs/build.gradle index e495ecacce27..dec0de8ffa84 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -1,5 +1,4 @@ import org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.doc.DocSnippetTask import static org.elasticsearch.gradle.testclusters.TestDistribution.DEFAULT @@ -29,7 +28,7 @@ ext.docsFileTree = fileTree(projectDir) { // These files simply don't pass yet. We should figure out how to fix them. 
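The `buildDate` expression in the docker hunk above implements the caching trick described in its comment: outside CI the timestamp is truncated to midnight, so repeated local builds produce identical image metadata and Docker's layer cache stays warm. For example:

----
import java.time.Instant;
import java.time.temporal.ChronoUnit;

class BuildDateExample {
    public static void main(String[] args) {
        Instant now = Instant.parse("2024-11-18T14:37:02.123Z"); // illustrative timestamp
        // The truncated value is stable for the whole day:
        System.out.println(now.truncatedTo(ChronoUnit.DAYS)); // 2024-11-18T00:00:00Z
    }
}
----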
exclude 'reference/watcher/reference/actions.asciidoc' exclude 'reference/rest-api/security/ssl.asciidoc' - if (BuildParams.inFipsJvm) { + if (buildParams.inFipsJvm) { // We don't support this component in FIPS 140 exclude 'reference/ingest/processors/attachment.asciidoc' // We can't conditionally control output, this would be missing the ingest-attachment component @@ -38,7 +37,7 @@ ext.docsFileTree = fileTree(projectDir) { } tasks.named("yamlRestTest") { - if (BuildParams.isSnapshotBuild() == false) { + if (buildParams.isSnapshotBuild() == false) { // LOOKUP is not available in snapshots systemProperty 'tests.rest.blacklist', [ "reference/esql/processing-commands/lookup/esql-lookup-example" @@ -83,7 +82,7 @@ testClusters.matching { it.name == "yamlRestTest"}.configureEach { setting 'xpack.license.self_generated.type', 'trial' setting 'indices.lifecycle.history_index_enabled', 'false' keystorePassword 'keystore-password' - if (BuildParams.isSnapshotBuild() == false) { + if (buildParams.isSnapshotBuild() == false) { requiresFeature 'es.failure_store_feature_flag_enabled', new Version(8, 12, 0) } } @@ -170,7 +169,7 @@ testClusters.matching { it.name == "yamlRestTest"}.configureEach { return } // Do not install ingest-attachment in a FIPS 140 JVM as this is not supported - if (subproj.path.startsWith(':modules:ingest-attachment') && BuildParams.inFipsJvm) { + if (subproj.path.startsWith(':modules:ingest-attachment') && buildParams.inFipsJvm) { return } plugin subproj.path diff --git a/docs/changelog/115142.yaml b/docs/changelog/115142.yaml new file mode 100644 index 000000000000..2af968ae156d --- /dev/null +++ b/docs/changelog/115142.yaml @@ -0,0 +1,6 @@ +pr: 115142 +summary: Attempt to clean up index before remote transfer +area: Recovery +type: enhancement +issues: + - 104473 diff --git a/docs/changelog/115585.yaml b/docs/changelog/115585.yaml new file mode 100644 index 000000000000..02eecfc3d7d2 --- /dev/null +++ b/docs/changelog/115585.yaml @@ -0,0 +1,6 @@ +pr: 115459 +summary: Adds access to flags no_sub_matches and no_overlapping_matches to hyphenation-decompounder-tokenfilter +area: Search +type: enhancement +issues: + - 97849 diff --git a/docs/changelog/115678.yaml b/docs/changelog/115678.yaml new file mode 100644 index 000000000000..31240eae1ebb --- /dev/null +++ b/docs/changelog/115678.yaml @@ -0,0 +1,5 @@ +pr: 115678 +summary: "ESQL: extract common filter from aggs" +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/115930.yaml b/docs/changelog/115930.yaml new file mode 100644 index 000000000000..788a01b5cac9 --- /dev/null +++ b/docs/changelog/115930.yaml @@ -0,0 +1,5 @@ +pr: 115930 +summary: Inconsistency in the _analyzer api when the index is not included +area: Search +type: bug +issues: [] diff --git a/docs/changelog/116277.yaml b/docs/changelog/116277.yaml new file mode 100644 index 000000000000..62262b779778 --- /dev/null +++ b/docs/changelog/116277.yaml @@ -0,0 +1,6 @@ +pr: 116277 +summary: Update Semantic Query To Handle Zero Size Responses +area: Vector Search +type: bug +issues: + - 116083 diff --git a/docs/changelog/116676.yaml b/docs/changelog/116676.yaml new file mode 100644 index 000000000000..8c6671e17749 --- /dev/null +++ b/docs/changelog/116676.yaml @@ -0,0 +1,5 @@ +pr: 116676 +summary: Fix handling of time exceeded exception in fetch phase +area: Search +type: bug +issues: [] diff --git a/docs/changelog/116915.yaml b/docs/changelog/116915.yaml new file mode 100644 index 000000000000..9686f0023a14 --- /dev/null +++ b/docs/changelog/116915.yaml 
@@ -0,0 +1,5 @@ +pr: 116915 +summary: Improve message about insecure S3 settings +area: Snapshot/Restore +type: enhancement +issues: [] diff --git a/docs/changelog/116918.yaml b/docs/changelog/116918.yaml new file mode 100644 index 000000000000..3b04b4ae4a69 --- /dev/null +++ b/docs/changelog/116918.yaml @@ -0,0 +1,5 @@ +pr: 116918 +summary: Split searchable snapshot into multiple repo operations +area: Snapshot/Restore +type: enhancement +issues: [] diff --git a/docs/changelog/116922.yaml b/docs/changelog/116922.yaml new file mode 100644 index 000000000000..39e63da50ea2 --- /dev/null +++ b/docs/changelog/116922.yaml @@ -0,0 +1,5 @@ +pr: 116922 +summary: Always check if index mode is logsdb +area: Logs +type: bug +issues: [] diff --git a/docs/changelog/116931.yaml b/docs/changelog/116931.yaml new file mode 100644 index 000000000000..8b31d236ff13 --- /dev/null +++ b/docs/changelog/116931.yaml @@ -0,0 +1,5 @@ +pr: 116931 +summary: Enable built-in Inference Endpoints and default for Semantic Text +area: "Machine Learning" +type: enhancement +issues: [] diff --git a/docs/changelog/116942.yaml b/docs/changelog/116942.yaml new file mode 100644 index 000000000000..5037e8c59cd8 --- /dev/null +++ b/docs/changelog/116942.yaml @@ -0,0 +1,5 @@ +pr: 116942 +summary: Fix handling of bulk requests with semantic text fields and delete ops +area: Relevance +type: bug +issues: [] diff --git a/docs/changelog/116943.yaml b/docs/changelog/116943.yaml new file mode 100644 index 000000000000..3fd0793610cd --- /dev/null +++ b/docs/changelog/116943.yaml @@ -0,0 +1,11 @@ +pr: 116943 +summary: Remove support for deprecated `force_source` highlighting parameter +area: Highlighting +type: breaking +issues: [] +breaking: + title: Remove support for deprecated `force_source` highlighting parameter + area: REST API + details: The deprecated highlighting `force_source` parameter is no longer supported. + impact: Users should remove usages of the `force_source` parameter from their search requests. + notable: false diff --git a/docs/changelog/116962.yaml b/docs/changelog/116962.yaml new file mode 100644 index 000000000000..8f16b00e3f9f --- /dev/null +++ b/docs/changelog/116962.yaml @@ -0,0 +1,5 @@ +pr: 116962 +summary: "Add special case for elastic reranker in inference API" +area: Machine Learning +type: enhancement +issues: [] diff --git a/docs/changelog/116995.yaml b/docs/changelog/116995.yaml new file mode 100644 index 000000000000..a0467c630edf --- /dev/null +++ b/docs/changelog/116995.yaml @@ -0,0 +1,5 @@ +pr: 116995 +summary: "Apm-data: disable date_detection for all apm data streams" +area: Data streams +type: enhancement +issues: [] \ No newline at end of file diff --git a/docs/reference/analysis/tokenfilters/hyphenation-decompounder-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/hyphenation-decompounder-tokenfilter.asciidoc index eed66d81e913..1bd36f801aa1 100644 --- a/docs/reference/analysis/tokenfilters/hyphenation-decompounder-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/hyphenation-decompounder-tokenfilter.asciidoc @@ -111,6 +111,18 @@ output. Defaults to `5`. (Optional, Boolean) If `true`, only include the longest matching subword. Defaults to `false`. +`no_sub_matches`:: +(Optional, Boolean) +If `true`, do not match sub tokens in tokens that are in the word list. +Defaults to `false`. + +`no_overlapping_matches`:: +(Optional, Boolean) +If `true`, do not allow overlapping tokens. +Defaults to `false`. 
+ +Typically users will only want to include one of the three flags as enabling `no_overlapping_matches` is the most restrictive and `no_sub_matches` is more restrictive than `only_longest_match`. When enabling a more restrictive option the state of the less restrictive does not have any effect. + [[analysis-hyp-decomp-tokenfilter-customize]] ==== Customize and add to an analyzer diff --git a/docs/reference/connector/docs/sync-rules.asciidoc b/docs/reference/connector/docs/sync-rules.asciidoc index 9b2a77be7db0..3ab72093666b 100644 --- a/docs/reference/connector/docs/sync-rules.asciidoc +++ b/docs/reference/connector/docs/sync-rules.asciidoc @@ -116,6 +116,12 @@ A "match" is determined based on a condition defined by a combination of "field" The `Field` column should be used to define which field on a given document should be considered. +[NOTE] +==== +Only top-level fields are supported. +Nested/object fields cannot be referenced with "dot notation". +==== + The following rules are available in the `Rule` column: * `equals` - The field value is equal to the specified value. diff --git a/docs/reference/how-to/indexing-speed.asciidoc b/docs/reference/how-to/indexing-speed.asciidoc index 12de469c6844..d4cdb85e4d62 100644 --- a/docs/reference/how-to/indexing-speed.asciidoc +++ b/docs/reference/how-to/indexing-speed.asciidoc @@ -112,7 +112,7 @@ different nodes so there's redundancy for any node failures. You can also use insurance. [discrete] -==== Local vs.remote storage +==== Local vs. remote storage include::./remote-storage.asciidoc[] diff --git a/docs/reference/mapping/types/dense-vector.asciidoc b/docs/reference/mapping/types/dense-vector.asciidoc index 44f90eded863..4c16f260c13e 100644 --- a/docs/reference/mapping/types/dense-vector.asciidoc +++ b/docs/reference/mapping/types/dense-vector.asciidoc @@ -117,12 +117,10 @@ that sacrifices result accuracy for improved speed. The `dense_vector` type supports quantization to reduce the memory footprint required when <> `float` vectors. The three following quantization strategies are supported: -+ --- -`int8` - Quantizes each dimension of the vector to 1-byte integers. This reduces the memory footprint by 75% (or 4x) at the cost of some accuracy. -`int4` - Quantizes each dimension of the vector to half-byte integers. This reduces the memory footprint by 87% (or 8x) at the cost of accuracy. -`bbq` - experimental:[] Better binary quantization which reduces each dimension to a single bit precision. This reduces the memory footprint by 96% (or 32x) at a larger cost of accuracy. Generally, oversampling during query time and reranking can help mitigate the accuracy loss. --- +* `int8` - Quantizes each dimension of the vector to 1-byte integers. This reduces the memory footprint by 75% (or 4x) at the cost of some accuracy. +* `int4` - Quantizes each dimension of the vector to half-byte integers. This reduces the memory footprint by 87% (or 8x) at the cost of accuracy. +* `bbq` - experimental:[] Better binary quantization which reduces each dimension to a single bit precision. This reduces the memory footprint by 96% (or 32x) at a larger cost of accuracy. Generally, oversampling during query time and reranking can help mitigate the accuracy loss. + When using a quantized format, you may want to oversample and rescore the results to improve accuracy. See <> for more information. @@ -245,12 +243,11 @@ their vector field's similarity to the query vector. 
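Returning to the hyphenation decompounder options documented earlier in this diff: to make the three flags concrete, here is an illustrative settings sketch in the style of the compound-analysis tests added later in this PR (filter name, analyzer name, and word list are examples, not taken from the diff; `hyphenation_patterns_path` resolves against the config directory):

----
import org.elasticsearch.common.settings.Settings;

class DecompounderExample {
    static Settings indexSettings() {
        return Settings.builder()
            .put("index.analysis.filter.de_decompounder.type", "hyphenation_decompounder")
            .put("index.analysis.filter.de_decompounder.hyphenation_patterns_path", "de_DR.xml")
            .putList("index.analysis.filter.de_decompounder.word_list", "kaffee", "fee", "maschine")
            // With no_sub_matches, "kaffeemaschine" no longer emits the inner token "fee".
            .put("index.analysis.filter.de_decompounder.no_sub_matches", true)
            .put("index.analysis.analyzer.de_analyzer.tokenizer", "standard")
            .putList("index.analysis.analyzer.de_analyzer.filter", "lowercase", "de_decompounder")
            .build();
    }
}
----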
The `_score` of each document will be derived from the similarity, in a way that ensures scores are positive and that a larger score corresponds to a higher ranking. Defaults to `l2_norm` when `element_type: bit` otherwise defaults to `cosine`. - -NOTE: `bit` vectors only support `l2_norm` as their similarity metric. - + ^*^ This parameter can only be specified when `index` is `true`. + +NOTE: `bit` vectors only support `l2_norm` as their similarity metric. + .Valid values for `similarity` [%collapsible%open] ==== diff --git a/docs/reference/search/retriever.asciidoc b/docs/reference/search/retriever.asciidoc index 0da75ac30d2d..86a81f1d155d 100644 --- a/docs/reference/search/retriever.asciidoc +++ b/docs/reference/search/retriever.asciidoc @@ -704,5 +704,3 @@ Instead they are only allowed as elements of specific retrievers: * <> * <> * <> -* <> - diff --git a/docs/reference/search/search-your-data/highlighting.asciidoc b/docs/reference/search/search-your-data/highlighting.asciidoc index 7ee13d971b03..6a432e610452 100644 --- a/docs/reference/search/search-your-data/highlighting.asciidoc +++ b/docs/reference/search/search-your-data/highlighting.asciidoc @@ -176,8 +176,6 @@ fragmenter:: Specifies how text should be broken up in highlight snippets: `simple` or `span`. Only valid for the `plain` highlighter. Defaults to `span`. -force_source:: deprecated; this parameter has no effect - `simple`::: Breaks up text into same-sized fragments. `span`::: Breaks up text into same-sized fragments, but tries to avoid breaking up text between highlighted terms. This is helpful when you're diff --git a/docs/reference/searchable-snapshots/index.asciidoc b/docs/reference/searchable-snapshots/index.asciidoc index a38971a0bae6..8b0b3dc57686 100644 --- a/docs/reference/searchable-snapshots/index.asciidoc +++ b/docs/reference/searchable-snapshots/index.asciidoc @@ -176,10 +176,10 @@ nodes that have a shared cache. ==== Manually mounting snapshots captured by an Index Lifecycle Management ({ilm-init}) policy can interfere with {ilm-init}'s automatic management. This may lead to issues such as data loss -or complications with snapshot handling. +or complications with snapshot handling. For optimal results, allow {ilm-init} to manage -snapshots automatically. +snapshots automatically. <>. ==== @@ -293,6 +293,14 @@ repository. If you wish to search data across multiple regions, configure multiple clusters and use <> or <> instead of {search-snaps}. +It's worth noting that if a searchable snapshot index has no replicas, then when the node +hosting it is shut down, allocation will immediately try to relocate the index to a new node +in order to maximize availability. For fully mounted indices this will result in the new node +downloading the entire index snapshot from the cloud repository. Under a rolling cluster restart, +this may happen multiple times for each searchable snapshot index. Temporarily +disabling allocation during planned node restart will prevent this, as described in +the <>. + [discrete] [[back-up-restore-searchable-snapshots]] === Back up and restore {search-snaps} diff --git a/docs/reference/security/authorization/built-in-roles.asciidoc b/docs/reference/security/authorization/built-in-roles.asciidoc index 6db08b307f19..d730587e7db1 100644 --- a/docs/reference/security/authorization/built-in-roles.asciidoc +++ b/docs/reference/security/authorization/built-in-roles.asciidoc @@ -14,11 +14,6 @@ roles have a fixed set of privileges and cannot be updated. 
Grants access necessary for the APM system user to send system-level data (such as monitoring) to {es}. -[[built-in-roles-apm-user]] `apm_user` :: -Grants the privileges required for APM users (such as `read` and -`view_index_metadata` privileges on the `apm-*` and `.ml-anomalies*` indices). -deprecated:[7.13.0,"See {kibana-ref}/apm-app-users.html[APM app users and privileges\] for alternatives."]. - [[built-in-roles-beats-admin]] `beats_admin` :: Grants access to the `.management-beats` index, which contains configuration information for the Beats. diff --git a/docs/reference/snapshot-restore/repository-azure.asciidoc b/docs/reference/snapshot-restore/repository-azure.asciidoc index 0e6e1478cfc5..50dc42ac9163 100644 --- a/docs/reference/snapshot-restore/repository-azure.asciidoc +++ b/docs/reference/snapshot-restore/repository-azure.asciidoc @@ -181,7 +181,7 @@ is running. When running {es} in https://azure.microsoft.com/en-gb/products/kubernetes-service[Azure Kubernetes -Service], for instance using {eck-ref}[{eck}], you should use +Service], for instance using {eck-ref}/k8s-snapshots.html#k8s-azure-workload-identity[{eck}], you should use https://azure.github.io/azure-workload-identity/docs/introduction.html[Azure Workload Identity] to provide credentials to {es}. To use Azure Workload Identity, mount the `azure-identity-token` volume as a subdirectory of the diff --git a/gradle.properties b/gradle.properties index 745fb4f9e51a..aa38a61ab005 100644 --- a/gradle.properties +++ b/gradle.properties @@ -2,7 +2,7 @@ org.gradle.welcome=never org.gradle.warning.mode=none org.gradle.parallel=true # We need to declare --add-exports to make spotless working seamlessly with jdk16 -org.gradle.jvmargs=-XX:+HeapDumpOnOutOfMemoryError -Xss2m --add-exports jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED --add-exports jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED --add-exports jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED --add-exports jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED --add-exports jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED +org.gradle.jvmargs=-XX:+HeapDumpOnOutOfMemoryError -Xss2m --add-exports jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED --add-exports jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED --add-exports jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED --add-exports jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED --add-exports jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED --add-opens java.base/java.time=ALL-UNNAMED # Enforce the build to fail on deprecated gradle api usage systemProp.org.gradle.warning.mode=fail diff --git a/gradle/build.versions.toml b/gradle/build.versions.toml index d11c4b7fd9c9..1bdd93e3a747 100644 --- a/gradle/build.versions.toml +++ b/gradle/build.versions.toml @@ -1,5 +1,5 @@ [versions] -asm = "9.6" +asm = "9.7.1" jackson = "2.15.0" junit5 = "5.8.1" spock = "2.1-groovy-3.0" @@ -39,7 +39,7 @@ maven-model = "org.apache.maven:maven-model:3.6.2" mockito-core = "org.mockito:mockito-core:1.9.5" nebula-info = "com.netflix.nebula:gradle-info-plugin:11.3.3" reflections = "org.reflections:reflections:0.9.12" -shadow-plugin = "com.github.breskeby:shadow:3b035f2" +shadow-plugin = "com.gradleup.shadow:shadow-gradle-plugin:8.3.5" snakeyaml = { group = "org.yaml", name = "snakeyaml", version = { strictly = "2.0" } } spock-core = { group = "org.spockframework", name="spock-core", version.ref="spock" } spock-junit4 = { group = "org.spockframework", name="spock-junit4", version.ref="spock" } diff --git a/gradle/verification-metadata.xml 
b/gradle/verification-metadata.xml index 2b8f1b2a09ad..5e874b52fc4c 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -219,6 +219,11 @@ + + + + + @@ -234,16 +239,31 @@ + + + + + + + + + + + + + + + @@ -414,21 +434,16 @@ + + + + + - - - - - - - - - - @@ -614,6 +629,11 @@ + + + + + @@ -799,6 +819,11 @@ + + + + + @@ -1196,6 +1221,11 @@ + + + + + @@ -1236,6 +1266,11 @@ + + + + + @@ -1311,6 +1346,11 @@ + + + + + @@ -1922,6 +1962,11 @@ + + + + + @@ -1937,6 +1982,11 @@ + + + + + @@ -2282,6 +2332,11 @@ + + + + + @@ -2810,6 +2865,11 @@ + + + + + @@ -2830,6 +2890,11 @@ + + + + + @@ -2965,6 +3030,16 @@ + + + + + + + + + + @@ -3020,6 +3095,11 @@ + + + + + @@ -3393,6 +3473,16 @@ + + + + + + + + + + @@ -3423,11 +3513,21 @@ + + + + + + + + + + @@ -3533,6 +3633,11 @@ + + + + + @@ -3548,66 +3653,131 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -3618,11 +3788,21 @@ + + + + + + + + + + @@ -3653,6 +3833,11 @@ + + + + + @@ -4153,6 +4338,11 @@ + + + + + @@ -4198,6 +4388,11 @@ + + + + + @@ -4238,6 +4433,11 @@ + + + + + @@ -4423,6 +4623,11 @@ + + + + + diff --git a/libs/simdvec/build.gradle b/libs/simdvec/build.gradle index 02f960130e69..ffc50ecb1f6f 100644 --- a/libs/simdvec/build.gradle +++ b/libs/simdvec/build.gradle @@ -33,7 +33,7 @@ tasks.matching { it.name == "compileMain21Java" }.configureEach { } tasks.named('test').configure { - if (BuildParams.getRuntimeJavaVersion().majorVersion.toInteger() >= 21) { + if (buildParams.getRuntimeJavaVersion().map{ it.majorVersion.toInteger() }.get() >= 21) { jvmArgs '--add-modules=jdk.incubator.vector' } } diff --git a/modules/aggregations/build.gradle b/modules/aggregations/build.gradle index a1ab6363166c..5df0a890af75 100644 --- a/modules/aggregations/build.gradle +++ b/modules/aggregations/build.gradle @@ -28,7 +28,7 @@ restResources { } } -if (BuildParams.isSnapshotBuild() == false) { +if (buildParams.isSnapshotBuild() == false) { tasks.named("test").configure { systemProperty 'es.index_mode_feature_flag_registered', 'true' } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HyphenationCompoundWordTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HyphenationCompoundWordTokenFilterFactory.java index b2b7f86ce34e..e091f0175009 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HyphenationCompoundWordTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HyphenationCompoundWordTokenFilterFactory.java @@ -28,6 +28,8 @@ */ public class HyphenationCompoundWordTokenFilterFactory extends AbstractCompoundWordTokenFilterFactory { + private final boolean noSubMatches; + private final boolean noOverlappingMatches; private final HyphenationTree hyphenationTree; HyphenationCompoundWordTokenFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { @@ -46,6 +48,9 @@ public class HyphenationCompoundWordTokenFilterFactory extends AbstractCompoundW } catch (Exception e) { throw new IllegalArgumentException("Exception while reading hyphenation_patterns_path.", e); } + + noSubMatches = settings.getAsBoolean("no_sub_matches", false); + noOverlappingMatches = settings.getAsBoolean("no_overlapping_matches", false); } @Override @@ -57,7 +62,9 @@ public TokenStream create(TokenStream tokenStream) { minWordSize, minSubwordSize, maxSubwordSize, - onlyLongestMatch + 
onlyLongestMatch, + noSubMatches, + noOverlappingMatches ); } } diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CompoundAnalysisTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CompoundAnalysisTests.java index ad98c2f8ffe1..69dd8e91b52b 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CompoundAnalysisTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CompoundAnalysisTests.java @@ -31,6 +31,9 @@ import org.hamcrest.MatcherAssert; import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -42,6 +45,7 @@ import static org.hamcrest.Matchers.instanceOf; public class CompoundAnalysisTests extends ESTestCase { + public void testDefaultsCompoundAnalysis() throws Exception { Settings settings = getJsonSettings(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings); @@ -63,6 +67,44 @@ public void testDictionaryDecompounder() throws Exception { assertWarnings("Setting [version] on analysis component [custom7] has no effect and is deprecated"); } + public void testHyphenationDecompoundingAnalyzerOnlyLongestMatch() throws Exception { + Settings[] settingsArr = new Settings[] { getJsonSettings(), getYamlSettings() }; + for (Settings settings : settingsArr) { + List terms = analyze(settings, "hyphenationDecompoundingAnalyzerOnlyLongestMatch", "kaffeemaschine fussballpumpe"); + MatcherAssert.assertThat( + terms, + hasItems("kaffeemaschine", "kaffee", "fee", "maschine", "fussballpumpe", "fussball", "ballpumpe", "pumpe") + ); + } + assertWarnings("Setting [version] on analysis component [custom7] has no effect and is deprecated"); + } + + /** + * For example given a word list of: ["kaffee", "fee", "maschine"] + * no_sub_matches should prevent the token "fee" as a token in "kaffeemaschine". + */ + public void testHyphenationDecompoundingAnalyzerNoSubMatches() throws Exception { + Settings[] settingsArr = new Settings[] { getJsonSettings(), getYamlSettings() }; + for (Settings settings : settingsArr) { + List terms = analyze(settings, "hyphenationDecompoundingAnalyzerNoSubMatches", "kaffeemaschine fussballpumpe"); + MatcherAssert.assertThat(terms, hasItems("kaffeemaschine", "kaffee", "maschine", "fussballpumpe", "fussball", "ballpumpe")); + } + assertWarnings("Setting [version] on analysis component [custom7] has no effect and is deprecated"); + } + + /** + * For example given a word list of: ["fuss", "fussball", "ballpumpe", "ball", "pumpe"] + * no_overlapping_matches should prevent the token "ballpumpe" as a token in "fussballpumpe. 
+ */ + public void testHyphenationDecompoundingAnalyzerNoOverlappingMatches() throws Exception { + Settings[] settingsArr = new Settings[] { getJsonSettings(), getYamlSettings() }; + for (Settings settings : settingsArr) { + List terms = analyze(settings, "hyphenationDecompoundingAnalyzerNoOverlappingMatches", "kaffeemaschine fussballpumpe"); + MatcherAssert.assertThat(terms, hasItems("kaffeemaschine", "kaffee", "maschine", "fussballpumpe", "fussball", "pumpe")); + } + assertWarnings("Setting [version] on analysis component [custom7] has no effect and is deprecated"); + } + private List analyze(Settings settings, String analyzerName, String text) throws IOException { IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings); AnalysisModule analysisModule = createAnalysisModule(settings); @@ -92,20 +134,25 @@ public Map> getTokenFilters() { } private Settings getJsonSettings() throws IOException { - String json = "/org/elasticsearch/analysis/common/test1.json"; - return Settings.builder() - .loadFromStream(json, getClass().getResourceAsStream(json), false) - .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .build(); + return getSettings("/org/elasticsearch/analysis/common/test1.json"); } private Settings getYamlSettings() throws IOException { - String yaml = "/org/elasticsearch/analysis/common/test1.yml"; + return getSettings("/org/elasticsearch/analysis/common/test1.yml"); + } + + private Settings getSettings(String filePath) throws IOException { + String hypenationRulesFileName = "de_DR.xml"; + InputStream hypenationRules = getClass().getResourceAsStream(hypenationRulesFileName); + Path home = createTempDir(); + Path config = home.resolve("config"); + Files.createDirectory(config); + Files.copy(hypenationRules, config.resolve(hypenationRulesFileName)); + return Settings.builder() - .loadFromStream(yaml, getClass().getResourceAsStream(yaml), false) + .loadFromStream(filePath, getClass().getResourceAsStream(filePath), false) .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), home.toString()) .build(); } } diff --git a/modules/analysis-common/src/test/resources/org/elasticsearch/analysis/common/de_DR.xml b/modules/analysis-common/src/test/resources/org/elasticsearch/analysis/common/de_DR.xml new file mode 100644 index 000000000000..37bcde1246a8 --- /dev/null +++ b/modules/analysis-common/src/test/resources/org/elasticsearch/analysis/common/de_DR.xml @@ -0,0 +1,1130 @@ + + + + + + + + + + + + aA + bB + cC + dD + eE + fF + gG + hH + iI + jJ + kK + lL + mM + nN + oO + pP + qQ + rR + sS + tT + uU + vV + wW + xX + yY + zZ + �� + �� + �� + �� + � + + + + .aa6l .ab3a4s .ab3ei .abi2 .ab3it .ab1l .ab1r .ab3u .ad3o4r .alti6 + .ana3c .an5alg .an1e + .ang8s2t1 + .an1s .ap1p .ar6sc .ar6ta .ar6tei .as2z + .au2f1 .au2s3 .be5erb .be3na .ber6t5r .bie6r5 .bim6s5t .brot3 .bru6s + .ch6 .che6f5 .da8c .da2r .dar5in .dar5u .den6ka .de5r6en .des6pe + .de8spo .de3sz .dia3s4 .dien4 .dy2s1 .ehren5 .eine6 .ei6n5eh .ei8nen + .ein5sa .en6der .en6d5r .en3k4 .en8ta8 .en8tei .en4t3r .epo1 .er6ban + .er6b5ei .er6bla .er6d5um .er3ei .er5er .er3in .er3o4b .erwi5s .es1p + .es8t1l .es8t1n + .ex1a2 .ex3em .fal6sc .fe6st5a .flu4g3 .furch8 .ga6ner .ge3n4a + .ge5r� + .ges6 + .halb5 .halbe6 .hal6br .haup4 .hau4t .heima6 .he4r3e + .her6za .he5x .hin3 .hir8sc .ho4c .hu3sa .hy5o 
+[de_DR.xml body: 1,130 lines of Latin-1 German TeX hyphenation data, consisting of character classes (aA..zZ plus umlaut and eszett classes) followed by the hyphenation pattern tables; the non-ASCII characters are garbled in this copy, so the file body is not reproduced here.]
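The analyzer definitions these tests exercise live in test1.json and test1.yml, which are not part of this diff. As a rough sketch only: assuming the three factory parameters above are exposed as the snake_case filter settings only_longest_match, no_sub_matches, and no_overlapping_matches (the test javadocs use the latter two names; the analyzer and filter names below are illustrative, not taken from test1.json), an equivalent analyzer could be built programmatically with org.elasticsearch.common.settings.Settings:

    // Sketch under the assumptions stated above, using the word list from the
    // no_overlapping_matches javadoc and the de_DR.xml hyphenation rules file
    // that getSettings() copies into the config directory.
    Settings decompounderSettings = Settings.builder()
        .put("index.analysis.filter.my_decompounder.type", "hyphenation_decompounder")
        .put("index.analysis.filter.my_decompounder.hyphenation_patterns_path", "de_DR.xml")
        .putList("index.analysis.filter.my_decompounder.word_list", "fuss", "fussball", "ballpumpe", "ball", "pumpe")
        .put("index.analysis.filter.my_decompounder.no_overlapping_matches", true) // assumed setting name
        .put("index.analysis.analyzer.my_analyzer.tokenizer", "standard")
        .putList("index.analysis.analyzer.my_analyzer.filter", "my_decompounder")
        .build();

With this word list and the input "fussballpumpe", the no_overlapping_matches variant keeps "fussball" and "pumpe" but drops the overlapping "ballpumpe", matching the assertions in testHyphenationDecompoundingAnalyzerNoOverlappingMatches.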
diff --git a/modules/data-streams/build.gradle b/modules/data-streams/build.gradle
index d5ce1bfc8d93..b6fc1e3722cc 100644
--- a/modules/data-streams/build.gradle
+++ b/modules/data-streams/build.gradle
@@ -28,14 +28,14 @@ tasks.withType(StandaloneRestIntegTestTask).configureEach {
   usesDefaultDistribution()
 }
 
-if (BuildParams.inFipsJvm){
+if (buildParams.inFipsJvm){
   // These fail in CI but only when run as part of checkPart2 and not individually.
   // Tracked in :
   tasks.named("javaRestTest").configure{enabled = false }
   tasks.named("yamlRestTest").configure{enabled = false }
 }
 
-if (BuildParams.isSnapshotBuild() == false) {
+if (buildParams.isSnapshotBuild() == false) {
   tasks.withType(Test).configureEach {
     systemProperty 'es.failure_store_feature_flag_enabled', 'true'
   }
diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/FailureStoreMetricsWithIncrementalBulkIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/FailureStoreMetricsWithIncrementalBulkIT.java
new file mode 100644
index 000000000000..2c9b7417b283
--- /dev/null
+++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/FailureStoreMetricsWithIncrementalBulkIT.java
@@ -0,0 +1,251 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.datastreams;
+
+import org.elasticsearch.action.DocWriteRequest;
+import org.elasticsearch.action.admin.indices.template.put.TransportPutComposableIndexTemplateAction;
+import org.elasticsearch.action.bulk.BulkResponse;
+import org.elasticsearch.action.bulk.FailureStoreMetrics;
+import org.elasticsearch.action.bulk.IncrementalBulkService;
+import org.elasticsearch.action.bulk.IndexDocFailureStoreStatus;
+import org.elasticsearch.action.datastreams.CreateDataStreamAction;
+import org.elasticsearch.action.datastreams.GetDataStreamAction;
+import org.elasticsearch.action.index.IndexRequest;
+import org.elasticsearch.action.support.PlainActionFuture;
+import org.elasticsearch.cluster.metadata.ComposableIndexTemplate;
+import org.elasticsearch.cluster.metadata.DataStream;
+import org.elasticsearch.cluster.metadata.Template;
+import org.elasticsearch.common.compress.CompressedXContent;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.core.AbstractRefCounted;
+import org.elasticsearch.core.Releasable;
+import org.elasticsearch.core.Strings;
+import org.elasticsearch.index.Index;
+import org.elasticsearch.index.IndexService;
+import org.elasticsearch.index.IndexingPressure;
+import org.elasticsearch.index.mapper.DateFieldMapper;
+import org.elasticsearch.index.mapper.extras.MapperExtrasPlugin;
+import org.elasticsearch.index.shard.IndexShard;
+import org.elasticsearch.index.shard.ShardId;
+import org.elasticsearch.indices.IndicesService;
+import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.plugins.PluginsService;
+import org.elasticsearch.telemetry.Measurement;
+import org.elasticsearch.telemetry.TestTelemetryPlugin;
+import org.elasticsearch.test.ESIntegTestCase;
+import org.elasticsearch.xcontent.XContentType;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.backingIndexEqualTo;
+import static org.elasticsearch.cluster.metadata.MetadataIndexTemplateService.DEFAULT_TIMESTAMP_FIELD;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.notNullValue;
+
+public class FailureStoreMetricsWithIncrementalBulkIT extends ESIntegTestCase {
+
+    private static final List<String> METRICS = List.of(
+        FailureStoreMetrics.METRIC_TOTAL,
+        FailureStoreMetrics.METRIC_FAILURE_STORE,
+        FailureStoreMetrics.METRIC_REJECTED
+    );
+
+    private static final String DATA_STREAM_NAME = "data-stream-incremental";
+
+    @Override
+    protected Collection<Class<? extends Plugin>> nodePlugins() {
+        return List.of(DataStreamsPlugin.class, TestTelemetryPlugin.class, MapperExtrasPlugin.class);
+    }
+
+    @Override
+    protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) {
+        return Settings.builder()
+            .put(super.nodeSettings(nodeOrdinal, otherSettings))
+            .put(IndexingPressure.SPLIT_BULK_LOW_WATERMARK.getKey(), "512B")
+            .put(IndexingPressure.SPLIT_BULK_LOW_WATERMARK_SIZE.getKey(), "2048B")
+            .put(IndexingPressure.SPLIT_BULK_HIGH_WATERMARK.getKey(), "2KB")
+            .put(IndexingPressure.SPLIT_BULK_HIGH_WATERMARK_SIZE.getKey(), "1024B")
+            .build();
+    }
+
+    public void testShortCircuitFailure() throws Exception {
+        createDataStreamWithFailureStore();
+
+        String coordinatingOnlyNode = internalCluster().startCoordinatingOnlyNode(Settings.EMPTY);
+
+        AbstractRefCounted refCounted = AbstractRefCounted.of(() -> {});
+        IncrementalBulkService incrementalBulkService = internalCluster().getInstance(IncrementalBulkService.class, coordinatingOnlyNode);
+        try (IncrementalBulkService.Handler handler = incrementalBulkService.newBulkRequest()) {
+
+            AtomicBoolean nextRequested = new AtomicBoolean(true);
+            int successfullyStored = 0;
+            while (nextRequested.get()) {
+                nextRequested.set(false);
+                refCounted.incRef();
+                handler.addItems(List.of(indexRequest(DATA_STREAM_NAME)), refCounted::decRef, () -> nextRequested.set(true));
+                successfullyStored++;
+            }
+            assertBusy(() -> assertTrue(nextRequested.get()));
+            var metrics = collectTelemetry();
+            assertDataStreamMetric(metrics, FailureStoreMetrics.METRIC_TOTAL, DATA_STREAM_NAME, successfullyStored);
+            assertDataStreamMetric(metrics, FailureStoreMetrics.METRIC_FAILURE_STORE, DATA_STREAM_NAME, 0);
+            assertDataStreamMetric(metrics, FailureStoreMetrics.METRIC_REJECTED, DATA_STREAM_NAME, 0);
+
+            // Introduce artificial pressure that will reject the following requests
+            String node = findNodeOfPrimaryShard(DATA_STREAM_NAME);
+            IndexingPressure primaryPressure = internalCluster().getInstance(IndexingPressure.class, node);
+            long memoryLimit = primaryPressure.stats().getMemoryLimit();
+            long primaryRejections = primaryPressure.stats().getPrimaryRejections();
+            try (Releasable ignored = primaryPressure.markPrimaryOperationStarted(10, memoryLimit, false)) {
+                while (primaryPressure.stats().getPrimaryRejections() == primaryRejections) {
+                    while (nextRequested.get()) {
+                        nextRequested.set(false);
+                        refCounted.incRef();
+                        List<DocWriteRequest<?>> requests = new ArrayList<>();
+                        for (int i = 0; i < 20; ++i) {
+                            requests.add(indexRequest(DATA_STREAM_NAME));
+                        }
+                        handler.addItems(requests, refCounted::decRef, () -> nextRequested.set(true));
+                    }
+                    assertBusy(() -> assertTrue(nextRequested.get()));
+                }
+            }
+
+            while (nextRequested.get()) {
+                nextRequested.set(false);
+                refCounted.incRef();
+                handler.addItems(List.of(indexRequest(DATA_STREAM_NAME)), refCounted::decRef, () -> nextRequested.set(true));
+            }
+
+            assertBusy(() -> assertTrue(nextRequested.get()));
+
+            PlainActionFuture<BulkResponse> future = new PlainActionFuture<>();
+            handler.lastItems(List.of(indexRequest(DATA_STREAM_NAME)), refCounted::decRef, future);
+
+            BulkResponse bulkResponse = safeGet(future);
+
+            for (int i = 0; i < bulkResponse.getItems().length; ++i) {
+                // the first requests were successful
+                boolean hasFailed = i >= successfullyStored;
+                assertThat(bulkResponse.getItems()[i].isFailed(), is(hasFailed));
+                assertThat(bulkResponse.getItems()[i].getFailureStoreStatus(), is(IndexDocFailureStoreStatus.NOT_APPLICABLE_OR_UNKNOWN));
+            }
+
+            metrics = collectTelemetry();
+            assertDataStreamMetric(metrics, FailureStoreMetrics.METRIC_TOTAL, DATA_STREAM_NAME, bulkResponse.getItems().length);
+            assertDataStreamMetric(
+                metrics,
+                FailureStoreMetrics.METRIC_REJECTED,
+                DATA_STREAM_NAME,
+                bulkResponse.getItems().length - successfullyStored
+            );
+            assertDataStreamMetric(metrics, FailureStoreMetrics.METRIC_FAILURE_STORE, DATA_STREAM_NAME, 0);
+        }
+    }
+
+    private void createDataStreamWithFailureStore() throws IOException {
+        TransportPutComposableIndexTemplateAction.Request request = new TransportPutComposableIndexTemplateAction.Request(
+            "template-incremental"
+        );
+        request.indexTemplate(
+            ComposableIndexTemplate.builder()
+                .indexPatterns(List.of(DATA_STREAM_NAME + "*"))
+                .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false, true))
+                .template(new Template(null, new CompressedXContent("""
+                    {
+                      "dynamic": false,
+                      "properties": {
+                        "@timestamp": {
+                          "type": "date"
+                        },
+                        "count": {
+                          "type": "long"
+                        }
+                      }
+                    }"""), null))
+                .build()
+        );
+        assertAcked(safeGet(client().execute(TransportPutComposableIndexTemplateAction.TYPE, request)));
+
+        final var createDataStreamRequest = new CreateDataStreamAction.Request(
+            TEST_REQUEST_TIMEOUT,
+            TEST_REQUEST_TIMEOUT,
+            DATA_STREAM_NAME
+        );
+        assertAcked(safeGet(client().execute(CreateDataStreamAction.INSTANCE, createDataStreamRequest)));
+    }
+
+    private static Map<String, List<Measurement>> collectTelemetry() {
+        Map<String, List<Measurement>> measurements = new HashMap<>();
+        for (PluginsService pluginsService : internalCluster().getInstances(PluginsService.class)) {
+            final TestTelemetryPlugin telemetryPlugin = pluginsService.filterPlugins(TestTelemetryPlugin.class).findFirst().orElseThrow();
+
+            telemetryPlugin.collect();
+
+            for (String metricName : METRICS) {
+                measurements.put(metricName, telemetryPlugin.getLongCounterMeasurement(metricName));
+            }
+        }
+        return measurements;
+    }
+
+    private void assertDataStreamMetric(Map<String, List<Measurement>> metrics, String metric, String dataStreamName, int expectedValue) {
+        List<Measurement> measurements = metrics.get(metric);
+        assertThat(measurements, notNullValue());
+        long totalValue = measurements.stream()
+            .filter(m -> m.attributes().get("data_stream").equals(dataStreamName))
+            .mapToLong(Measurement::getLong)
+            .sum();
+        assertThat(totalValue, equalTo((long) expectedValue));
+    }
+
+    private static IndexRequest indexRequest(String dataStreamName) {
+        String time = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.formatMillis(System.currentTimeMillis());
+        String value = "1";
+        return new IndexRequest(dataStreamName).opType(DocWriteRequest.OpType.CREATE)
+            .source(Strings.format("{\"%s\":\"%s\", \"count\": %s}", DEFAULT_TIMESTAMP_FIELD, time, value), XContentType.JSON);
+    }
+
+    protected static String findNodeOfPrimaryShard(String dataStreamName) {
+        GetDataStreamAction.Request getDataStreamRequest = new GetDataStreamAction.Request(
+            TEST_REQUEST_TIMEOUT,
+            new String[] { dataStreamName }
+        );
+        GetDataStreamAction.Response getDataStreamResponse = safeGet(client().execute(GetDataStreamAction.INSTANCE, getDataStreamRequest));
+        assertThat(getDataStreamResponse.getDataStreams().size(), equalTo(1));
+        DataStream dataStream = getDataStreamResponse.getDataStreams().getFirst().getDataStream();
+        assertThat(dataStream.getName(), equalTo(DATA_STREAM_NAME));
+        assertThat(dataStream.getIndices().size(), equalTo(1));
+        String backingIndex = dataStream.getIndices().getFirst().getName();
+        assertThat(backingIndex, backingIndexEqualTo(DATA_STREAM_NAME, 1));
+
+        Index index = resolveIndex(backingIndex);
+        int shardId = 0;
+        for (String node : internalCluster().getNodeNames()) {
+            var indicesService = internalCluster().getInstance(IndicesService.class, node);
+            IndexService indexService = indicesService.indexService(index);
+            if (indexService != null) {
+                IndexShard shard = indexService.getShardOrNull(shardId);
+                if (shard != null && shard.isActive() && shard.routingEntry().primary()) {
+                    return node;
+                }
+            }
+        }
+        throw new AssertionError("IndexShard instance not found for shard " + new ShardId(index, shardId));
+    }
+}
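Distilled from the test above, the incremental bulk idiom is: feed batches only while the handler keeps re-arming the ready-for-more callback, then close the request with lastItems and read the per-item results. A condensed sketch, using the same calls and the test's indexRequest(...) helper, with the pressure setup and assertions omitted:

    AbstractRefCounted refCounted = AbstractRefCounted.of(() -> {});
    try (IncrementalBulkService.Handler handler = incrementalBulkService.newBulkRequest()) {
        AtomicBoolean nextRequested = new AtomicBoolean(true);
        while (nextRequested.get()) {
            nextRequested.set(false); // re-armed by the callback below when the handler wants more
            refCounted.incRef();
            handler.addItems(List.of(indexRequest(DATA_STREAM_NAME)), refCounted::decRef, () -> nextRequested.set(true));
        }
        // Under indexing pressure the callback is deferred, so the loop exits;
        // finishing the request flushes everything and yields the bulk response.
        PlainActionFuture<BulkResponse> future = new PlainActionFuture<>();
        handler.lastItems(List.of(indexRequest(DATA_STREAM_NAME)), refCounted::decRef, future);
        BulkResponse response = future.actionGet();
    }

Items accepted before the pressure threshold come back successful; everything after it is reported failed with NOT_APPLICABLE_OR_UNKNOWN failure-store status and is counted by the rejected metric rather than the failure-store metric, which is exactly what testShortCircuitFailure asserts.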
diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java
index f60a3e5c47a7..f090186480b7 100644
--- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java
+++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java
@@ -9,7 +9,6 @@
 
 package org.elasticsearch.datastreams;
 
-import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.indices.rollover.LazyRolloverAction;
 import org.elasticsearch.action.datastreams.autosharding.DataStreamAutoShardingService;
 import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention;
@@ -17,7 +16,6 @@
 import org.elasticsearch.features.FeatureSpecification;
 import org.elasticsearch.features.NodeFeature;
 
-import java.util.Map;
 import java.util.Set;
 
 /**
@@ -25,14 +23,8 @@
  */
 public class DataStreamFeatures implements FeatureSpecification {
 
-    public static final NodeFeature DATA_STREAM_LIFECYCLE = new NodeFeature("data_stream.lifecycle");
     public static final NodeFeature DATA_STREAM_FAILURE_STORE_TSDB_FIX = new NodeFeature("data_stream.failure_store.tsdb_fix");
 
-    @Override
-    public Map<NodeFeature, Version> getHistoricalFeatures() {
-        return Map.of(DATA_STREAM_LIFECYCLE, Version.V_8_11_0);
-    }
-
     @Override
     public Set<NodeFeature> getFeatures() {
         return Set.of(
diff --git a/modules/ingest-attachment/build.gradle b/modules/ingest-attachment/build.gradle
index 92e843fa31a6..821de8f834a4 100644
--- a/modules/ingest-attachment/build.gradle
+++ b/modules/ingest-attachment/build.gradle
@@ -143,7 +143,7 @@ tasks.named("thirdPartyAudit").configure {
   ignoreMissingClasses()
 }
 
-if (BuildParams.inFipsJvm) {
+if (buildParams.inFipsJvm) {
   tasks.named("test").configure { enabled = false }
   tasks.named("yamlRestTest").configure { enabled = false };
   tasks.named("yamlRestCompatTest").configure { enabled = false };
diff --git a/modules/ingest-geoip/qa/full-cluster-restart/build.gradle b/modules/ingest-geoip/qa/full-cluster-restart/build.gradle
index b51fa497c849..8e7d20108a86 100644
--- a/modules/ingest-geoip/qa/full-cluster-restart/build.gradle
+++ b/modules/ingest-geoip/qa/full-cluster-restart/build.gradle
@@ -24,7 +24,7 @@ dependencies {
 
 // once we are ready to test migrations from 8.x to 9.x, we can set the compatible version to 8.0.0
 // see https://github.com/elastic/elasticsearch/pull/93666
-BuildParams.bwcVersions.withWireCompatible(v -> v.before("9.0.0")) { bwcVersion, baseName ->
+buildParams.bwcVersions.withWireCompatible(v -> v.before("9.0.0")) { bwcVersion, baseName ->
   tasks.register(bwcTaskName(bwcVersion), StandaloneRestIntegTestTask) {
     usesBwcDistribution(bwcVersion)
     systemProperty("tests.old_cluster_version", bwcVersion)
diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.script.fields.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.script.fields.txt
index a739635e85a9..875b9a1dac3e 100644
--- a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.script.fields.txt
+++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.script.fields.txt
@@ -132,6 +132,21 @@ class org.elasticsearch.script.field.SeqNoDocValuesField @dynamic_type {
 class org.elasticsearch.script.field.VersionDocValuesField @dynamic_type {
 }
 
+class org.elasticsearch.script.field.vectors.MultiDenseVector {
+    MultiDenseVector EMPTY
+    float[] getMagnitudes()
+
+    Iterator getVectors()
+    boolean isEmpty()
+    int getDims()
+    int size()
+}
+
+class org.elasticsearch.script.field.vectors.MultiDenseVectorDocValuesField {
+    MultiDenseVector get()
+    MultiDenseVector get(MultiDenseVector)
+}
+
 class org.elasticsearch.script.field.vectors.DenseVector {
     DenseVector EMPTY
     float getMagnitude()
diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.txt
index 7ab9eb32852b..b2db0d1006d4 100644
--- a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.txt
+++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.txt
@@ -123,6 +123,11 @@ class org.elasticsearch.index.mapper.vectors.DenseVectorScriptDocValues {
   float getMagnitude()
 }
 
+class org.elasticsearch.index.mapper.vectors.MultiDenseVectorScriptDocValues {
+  Iterator getVectorValues()
+  float[] getMagnitudes()
+}
+
 class org.apache.lucene.util.BytesRef {
   byte[] bytes
   int offset
diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/181_multi_dense_vector_dv_fields_api.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/181_multi_dense_vector_dv_fields_api.yml
new file mode 100644
index 000000000000..66cb3f3c46fc
--- /dev/null
+++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/181_multi_dense_vector_dv_fields_api.yml
@@ -0,0 +1,178 @@
+setup:
+  - requires:
+      capabilities:
+        - method: POST
+          path: /_search
+          capabilities: [ multi_dense_vector_script_access ]
+      test_runner_features: capabilities
+      reason: "Support for multi dense vector field script access capability required"
+  - skip:
+      features: headers
+
+  - do:
+      indices.create:
+        index: test-index
+        body:
+          settings:
+            number_of_shards: 1
+          mappings:
+            properties:
+              vector:
+                type: multi_dense_vector
+                dims: 5
+              byte_vector:
+                type: multi_dense_vector
+                dims: 5
+                element_type: byte
+              bit_vector:
+                type: multi_dense_vector
+                dims: 40
+                element_type: bit
+  - do:
+      index:
+        index: test-index
+        id: "1"
+        body:
+          vector: [[230.0, 300.33, -34.8988, 15.555, -200.0], [-0.5, 100.0, -13, 14.8, -156.0]]
+          byte_vector: [[8, 5, -15, 1, -7], [-1, 115, -3, 4, -128]]
+          bit_vector: [[8, 5, -15, 1, -7], [-1, 115, -3, 4, -128]]
+
+  - do:
+      index:
+        index: test-index
+        id: "3"
+        body:
+          vector: [[0.5, 111.3, -13.0, 14.8, -156.0]]
+          byte_vector: [[2, 18, -5, 0, -124]]
+          bit_vector: [[2, 18, -5, 0, -124]]
+
+  - do:
+      indices.refresh: {}
+---
+"Test vector magnitude equality":
+  - skip:
+      features: close_to
+
+  - do:
+      headers:
+        Content-Type: application/json
+      search:
+        rest_total_hits_as_int: true
+        body:
+          query:
+            script_score:
+              query: {match_all: {} }
+              script:
+                source: "doc['vector'].magnitudes[0]"
+
+  - match: {hits.total: 2}
+
+  - match: {hits.hits.0._id: "1"}
+  - close_to: {hits.hits.0._score: {value: 429.6021, error: 0.01}}
+
+  - match: {hits.hits.1._id: "3"}
+  - close_to: {hits.hits.1._score: {value: 192.6447, error: 0.01}}
+
+  - do:
+      headers:
+        Content-Type: application/json
+      search:
+        rest_total_hits_as_int: true
+        body:
+          query:
+            script_score:
+              query: {match_all: {} }
+              script:
+                source: "doc['byte_vector'].magnitudes[0]"
+
+  - match: {hits.total: 2}
+
+  - match: {hits.hits.0._id: "3"}
+  - close_to: {hits.hits.0._score: {value: 125.41531, error: 0.01}}
+
+  - match: {hits.hits.1._id: "1"}
+  - close_to: {hits.hits.1._score: {value: 19.07878, error: 0.01}}
+
+  - do:
+      headers:
+        Content-Type: application/json
+      search:
+        rest_total_hits_as_int: true
+        body:
+          query:
+            script_score:
+              query: {match_all: {} }
+              script:
+                source: "doc['bit_vector'].magnitudes[0]"
+
+  - match: {hits.total: 2}
+
+  - match: {hits.hits.0._id: "1"}
+  - close_to: {hits.hits.0._score: {value: 3.872983, error: 0.01}}
+
+  - match: {hits.hits.1._id: "3"}
+  - close_to: {hits.hits.1._score: {value: 3.464101, error: 0.01}}
match: {hits.hits.0._id: "1"} + - close_to: {hits.hits.0._score: {value: 3.872983, error: 0.01}} + + - match: {hits.hits.1._id: "3"} + - close_to: {hits.hits.1._score: {value: 3.464101, error: 0.01}} +--- +"Test vector value scoring": + - skip: + features: close_to + + - do: + headers: + Content-Type: application/json + search: + rest_total_hits_as_int: true + body: + query: + script_score: + query: {match_all: {} } + script: + source: "doc['vector'].vectorValues.next()[0];" + + - match: {hits.total: 2} + + - match: {hits.hits.0._id: "1"} + - close_to: {hits.hits.0._score: {value: 230, error: 0.01}} + + - match: {hits.hits.1._id: "3"} + - close_to: {hits.hits.1._score: {value: 0.5, error: 0.01}} + + - do: + headers: + Content-Type: application/json + search: + rest_total_hits_as_int: true + body: + query: + script_score: + query: {match_all: {} } + script: + source: "doc['byte_vector'].vectorValues.next()[0];" + + - match: {hits.total: 2} + + - match: {hits.hits.0._id: "1"} + - close_to: {hits.hits.0._score: {value: 8, error: 0.01}} + + - match: {hits.hits.1._id: "3"} + - close_to: {hits.hits.1._score: {value: 2, error: 0.01}} + + - do: + headers: + Content-Type: application/json + search: + rest_total_hits_as_int: true + body: + query: + script_score: + query: {match_all: {} } + script: + source: "doc['bit_vector'].vectorValues.next()[0];" + + - match: {hits.total: 2} + + - match: {hits.hits.0._id: "1"} + - close_to: {hits.hits.0._score: {value: 8, error: 0.01}} + + - match: {hits.hits.1._id: "3"} + - close_to: {hits.hits.1._score: {value: 2, error: 0.01}} diff --git a/modules/legacy-geo/build.gradle b/modules/legacy-geo/build.gradle index d93627636234..1b4fd9d52bba 100644 --- a/modules/legacy-geo/build.gradle +++ b/modules/legacy-geo/build.gradle @@ -26,7 +26,7 @@ dependencies { testImplementation project(":test:framework") } -if (BuildParams.isSnapshotBuild() == false) { +if (buildParams.isSnapshotBuild() == false) { tasks.named("test").configure { systemProperty 'es.index_mode_feature_flag_registered', 'true' } diff --git a/modules/mapper-extras/build.gradle b/modules/mapper-extras/build.gradle index 35842ad27643..a7bdc11e1555 100644 --- a/modules/mapper-extras/build.gradle +++ b/modules/mapper-extras/build.gradle @@ -24,7 +24,7 @@ restResources { } } -if (BuildParams.isSnapshotBuild() == false) { +if (buildParams.isSnapshotBuild() == false) { tasks.named("test").configure { systemProperty 'es.index_mode_feature_flag_registered', 'true' } diff --git a/modules/reindex/build.gradle b/modules/reindex/build.gradle index 14a6b1e3f5b8..bb1500ba5566 100644 --- a/modules/reindex/build.gradle +++ b/modules/reindex/build.gradle @@ -132,7 +132,7 @@ if (OS.current() == OS.WINDOWS) { oldEsDependency.getAttributes().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE); TaskProvider fixture = tasks.register("oldEs${version}Fixture", AntFixture) { dependsOn project.configurations.oldesFixture, jdks.legacy, oldEsDependency - executable = "${BuildParams.runtimeJavaHome}/bin/java" + executable = "${buildParams.runtimeJavaHome.get()}/bin/java" env 'CLASSPATH', "${-> project.configurations.oldesFixture.asPath}" // old versions of Elasticsearch need JAVA_HOME env 'JAVA_HOME', jdks.legacy.javaHomePath diff --git a/modules/repository-azure/build.gradle b/modules/repository-azure/build.gradle index 86776e743685..4babac68f1e7 100644 --- a/modules/repository-azure/build.gradle +++ b/modules/repository-azure/build.gradle @@ -321,7 +321,7 @@ 
tasks.register("workloadIdentityYamlRestTest", RestIntegTestTask) { // omitting key and sas_token so that we use a bearer token from workload identity } -if (BuildParams.inFipsJvm) { +if (buildParams.inFipsJvm) { // Cannot override the trust store in FIPS mode, and these tasks require a HTTPS fixture tasks.named("managedIdentityYamlRestTest").configure { enabled = false } tasks.named("workloadIdentityYamlRestTest").configure { enabled = false } diff --git a/modules/repository-gcs/build.gradle b/modules/repository-gcs/build.gradle index 246611e4803a..605d886a7105 100644 --- a/modules/repository-gcs/build.gradle +++ b/modules/repository-gcs/build.gradle @@ -178,7 +178,7 @@ tasks.named("thirdPartyAudit").configure { ) - if(BuildParams.graalVmRuntime == false) { + if(buildParams.graalVmRuntime == false) { ignoreMissingClasses( 'org.graalvm.nativeimage.hosted.Feature', 'org.graalvm.nativeimage.hosted.Feature$BeforeAnalysisAccess', @@ -240,7 +240,7 @@ def gcsThirdPartyTest = tasks.register("gcsThirdPartyUnitTest", Test) { systemProperty 'tests.security.manager', false systemProperty 'test.google.bucket', gcsBucket systemProperty 'test.google.fixture', Boolean.toString(useFixture) - nonInputProperties.systemProperty 'test.google.base', gcsBasePath + "_third_party_tests_" + BuildParams.testSeed + nonInputProperties.systemProperty 'test.google.base', gcsBasePath + "_third_party_tests_" + buildParams.testSeed if (useFixture == false) { nonInputProperties.systemProperty 'test.google.account', "${-> encodedCredentials.call()}" } diff --git a/modules/repository-s3/build.gradle b/modules/repository-s3/build.gradle index 59dfa6b9aace..c1cd1a13719a 100644 --- a/modules/repository-s3/build.gradle +++ b/modules/repository-s3/build.gradle @@ -115,7 +115,7 @@ String s3ECSBasePath = System.getenv("amazon_s3_base_path_ecs") String s3STSBucket = System.getenv("amazon_s3_bucket_sts") String s3STSBasePath = System.getenv("amazon_s3_base_path_sts") -boolean s3DisableChunkedEncoding = BuildParams.random.nextBoolean() +boolean s3DisableChunkedEncoding = buildParams.random.nextBoolean() // If all these variables are missing then we are testing against the internal fixture instead, which has the following // credentials hard-coded in. @@ -203,7 +203,7 @@ tasks.register("s3ThirdPartyTest", Test) { systemProperty 'test.s3.account', s3PermanentAccessKey systemProperty 'test.s3.key', s3PermanentSecretKey systemProperty 'test.s3.bucket', s3PermanentBucket - nonInputProperties.systemProperty 'test.s3.base', s3PermanentBasePath + "_third_party_tests_" + BuildParams.testSeed + nonInputProperties.systemProperty 'test.s3.base', s3PermanentBasePath + "_third_party_tests_" + buildParams.testSeed } tasks.named("thirdPartyAudit").configure { diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java index fde15d5d6e6b..591350c34ab8 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java @@ -318,8 +318,7 @@ class S3Repository extends MeteredBlobStoreRepository { deprecationLogger.critical( DeprecationCategory.SECURITY, "s3_repository_secret_settings", - "Using s3 access/secret key from repository settings. Instead " - + "store these in named clients and the elasticsearch keystore for secure settings." 
+                INSECURE_CREDENTIALS_DEPRECATION_WARNING
             );
         }
@@ -336,6 +335,11 @@ class S3Repository extends MeteredBlobStoreRepository {
         );
     }
 
+    static final String INSECURE_CREDENTIALS_DEPRECATION_WARNING = Strings.format("""
+        This repository's settings include an S3 access key and secret key, but repository settings are stored in plaintext and must not be \
+        used for security-sensitive information. Instead, store all secure settings in the keystore. See [%s] for more information.\
+        """, ReferenceDocs.SECURE_SETTINGS);
+
     private static Map<String, String> buildLocation(RepositoryMetadata metadata) {
         return Map.of("base_path", BASE_PATH_SETTING.get(metadata.settings()), "bucket", BUCKET_SETTING.get(metadata.settings()));
     }
diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java
index 52fe152ba41e..8e5f6634372d 100644
--- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java
+++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java
@@ -107,10 +107,9 @@ public void testRepositoryCredentialsOverrideSecureCredentials() {
         assertThat(credentials.getAWSSecretKey(), is("insecure_aws_secret"));
 
         assertCriticalWarnings(
+            "[access_key] setting was deprecated in Elasticsearch and will be removed in a future release.",
             "[secret_key] setting was deprecated in Elasticsearch and will be removed in a future release.",
-            "Using s3 access/secret key from repository settings. Instead store these in named clients and"
-                + " the elasticsearch keystore for secure settings.",
-            "[access_key] setting was deprecated in Elasticsearch and will be removed in a future release."
+            S3Repository.INSECURE_CREDENTIALS_DEPRECATION_WARNING
         );
     }
 
@@ -194,10 +193,9 @@ public void testReinitSecureCredentials() {
 
         if (hasInsecureSettings) {
             assertCriticalWarnings(
+                "[access_key] setting was deprecated in Elasticsearch and will be removed in a future release.",
                 "[secret_key] setting was deprecated in Elasticsearch and will be removed in a future release.",
-                "Using s3 access/secret key from repository settings. Instead store these in named clients and"
-                    + " the elasticsearch keystore for secure settings.",
-                "[access_key] setting was deprecated in Elasticsearch and will be removed in a future release."
+                S3Repository.INSECURE_CREDENTIALS_DEPRECATION_WARNING
             );
         }
     }
 
@@ -238,10 +236,7 @@ public void sendResponse(RestResponse response) {
             throw error.get();
         }
 
-        assertWarnings(
-            "Using s3 access/secret key from repository settings. Instead store these in named clients and"
-                + " the elasticsearch keystore for secure settings."
- ); + assertWarnings(S3Repository.INSECURE_CREDENTIALS_DEPRECATION_WARNING); } private void createRepository(final String name, final Settings repositorySettings) { diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java index 1e84f65cdd84..3095139ca468 100644 --- a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java +++ b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java @@ -174,7 +174,7 @@ public void testClientConnectionCloseMidStream() throws Exception { // await stream handler is ready and request full content var handler = ctx.awaitRestChannelAccepted(opaqueId); - assertBusy(() -> assertNotNull(handler.stream.buf())); + assertBusy(() -> assertNotEquals(0, handler.stream.bufSize())); assertFalse(handler.streamClosed); @@ -187,7 +187,7 @@ public void testClientConnectionCloseMidStream() throws Exception { // wait for resources to be released assertBusy(() -> { - assertNull(handler.stream.buf()); + assertEquals(0, handler.stream.bufSize()); assertTrue(handler.streamClosed); }); } @@ -204,15 +204,13 @@ public void testServerCloseConnectionMidStream() throws Exception { // await stream handler is ready and request full content var handler = ctx.awaitRestChannelAccepted(opaqueId); - assertBusy(() -> assertNotNull(handler.stream.buf())); + assertBusy(() -> assertNotEquals(0, handler.stream.bufSize())); assertFalse(handler.streamClosed); // terminate connection on server and wait resources are released handler.channel.request().getHttpChannel().close(); assertBusy(() -> { - // Cannot be simplified to assertNull. 
- // assertNull requires object to not fail on toString() method, but closing buffer can - assertTrue(handler.stream.buf() == null); + assertEquals(0, handler.stream.bufSize()); assertTrue(handler.streamClosed); }); } @@ -228,14 +226,14 @@ public void testServerExceptionMidStream() throws Exception { // await stream handler is ready and request full content var handler = ctx.awaitRestChannelAccepted(opaqueId); - assertBusy(() -> assertNotNull(handler.stream.buf())); + assertBusy(() -> assertNotEquals(0, handler.stream.bufSize())); assertFalse(handler.streamClosed); handler.shouldThrowInsideHandleChunk = true; handler.stream.next(); assertBusy(() -> { - assertNull(handler.stream.buf()); + assertEquals(0, handler.stream.bufSize()); assertTrue(handler.streamClosed); }); } diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestBodyStream.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestBodyStream.java index 238faa7a9237..ac3e3aecf97b 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestBodyStream.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestBodyStream.java @@ -37,12 +37,15 @@ public class Netty4HttpRequestBodyStream implements HttpBody.Stream { private final List tracingHandlers = new ArrayList<>(4); private final ThreadContext threadContext; private ByteBuf buf; - private boolean hasLast = false; private boolean requested = false; private boolean closing = false; private HttpBody.ChunkHandler handler; private ThreadContext.StoredContext requestContext; + // used in tests + private volatile int bufSize = 0; + private volatile boolean hasLast = false; + public Netty4HttpRequestBodyStream(Channel channel, ThreadContext threadContext) { this.channel = channel; this.threadContext = threadContext; @@ -112,11 +115,12 @@ private void addChunk(ByteBuf chunk) { comp.addComponent(true, chunk); buf = comp; } + bufSize = buf.readableBytes(); } // visible for test - ByteBuf buf() { - return buf; + int bufSize() { + return bufSize; } // visible for test @@ -130,6 +134,7 @@ private void send() { var bytesRef = Netty4Utils.toReleasableBytesReference(buf); requested = false; buf = null; + bufSize = 0; try (var ignored = threadContext.restoreExistingContext(requestContext)) { for (var tracer : tracingHandlers) { tracer.onNext(bytesRef, hasLast); @@ -164,6 +169,7 @@ private void doClose() { if (buf != null) { buf.release(); buf = null; + bufSize = 0; } channel.config().setAutoRead(true); } diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpRequestBodyStreamTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpRequestBodyStreamTests.java index 5ff5a27e2d55..d456bbecfbd2 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpRequestBodyStreamTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpRequestBodyStreamTests.java @@ -67,7 +67,7 @@ public void testEnqueueChunksBeforeRequest() { for (int i = 0; i < totalChunks; i++) { channel.writeInbound(randomContent(1024)); } - assertEquals(totalChunks * 1024, stream.buf().readableBytes()); + assertEquals(totalChunks * 1024, stream.bufSize()); } // ensures all received chunks can be flushed downstream @@ -119,7 +119,7 @@ public void testReadFromChannel() { channel.writeInbound(randomLastContent(chunkSize)); for (int i = 0; i < 
totalChunks; i++) { - assertNull("should not enqueue chunks", stream.buf()); + assertEquals("should not enqueue chunks", 0, stream.bufSize()); stream.next(); channel.runPendingTasks(); assertEquals("each next() should produce single chunk", i + 1, gotChunks.size()); diff --git a/muted-tests.yml b/muted-tests.yml index a9dda9ac8078..2b3c2a64d5ab 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -11,9 +11,6 @@ tests: - class: org.elasticsearch.xpack.restart.FullClusterRestartIT method: testDataStreams {cluster=UPGRADED} issue: https://github.com/elastic/elasticsearch/issues/111448 -- class: org.elasticsearch.upgrades.FullClusterRestartIT - method: testSnapshotRestore {cluster=UPGRADED} - issue: https://github.com/elastic/elasticsearch/issues/111798 - class: org.elasticsearch.smoketest.WatcherYamlRestIT method: test {p0=watcher/usage/10_basic/Test watcher usage stats output} issue: https://github.com/elastic/elasticsearch/issues/112189 @@ -147,9 +144,6 @@ tests: - class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT method: test {p0=cat.shards/10_basic/Help} issue: https://github.com/elastic/elasticsearch/issues/116110 -- class: org.elasticsearch.xpack.restart.CoreFullClusterRestartIT - method: testSnapshotRestore {cluster=UPGRADED} - issue: https://github.com/elastic/elasticsearch/issues/111799 - class: org.elasticsearch.xpack.ml.integration.DatafeedJobsRestIT method: testLookbackWithIndicesOptions issue: https://github.com/elastic/elasticsearch/issues/116127 @@ -215,9 +209,6 @@ tests: - class: org.elasticsearch.reservedstate.service.RepositoriesFileSettingsIT method: testSettingsApplied issue: https://github.com/elastic/elasticsearch/issues/116694 -- class: org.elasticsearch.snapshots.SnapshotShutdownIT - method: testRestartNodeDuringSnapshot - issue: https://github.com/elastic/elasticsearch/issues/116730 - class: org.elasticsearch.xpack.security.authc.ldap.ActiveDirectoryGroupsResolverTests issue: https://github.com/elastic/elasticsearch/issues/116182 - class: org.elasticsearch.xpack.test.rest.XPackRestIT @@ -229,9 +220,34 @@ tests: - class: org.elasticsearch.repositories.s3.RepositoryS3RestIT method: testReloadCredentialsFromKeystore issue: https://github.com/elastic/elasticsearch/issues/116811 -- class: org.elasticsearch.http.netty4.Netty4IncrementalRequestHandlingIT - method: testClientConnectionCloseMidStream - issue: https://github.com/elastic/elasticsearch/issues/116815 +- class: org.elasticsearch.xpack.searchablesnapshots.hdfs.SecureHdfsSearchableSnapshotsIT + issue: https://github.com/elastic/elasticsearch/issues/116851 +- class: org.elasticsearch.xpack.esql.analysis.VerifierTests + method: testCategorizeWithinAggregations + issue: https://github.com/elastic/elasticsearch/issues/116856 +- class: org.elasticsearch.xpack.esql.analysis.VerifierTests + method: testCategorizeSingleGrouping + issue: https://github.com/elastic/elasticsearch/issues/116857 +- class: org.elasticsearch.xpack.esql.analysis.VerifierTests + method: testCategorizeNestedGrouping + issue: https://github.com/elastic/elasticsearch/issues/116858 +- class: org.elasticsearch.search.basic.SearchWithRandomIOExceptionsIT + method: testRandomDirectoryIOExceptions + issue: https://github.com/elastic/elasticsearch/issues/114824 +- class: org.elasticsearch.xpack.inference.InferenceRestIT + method: test {p0=inference/30_semantic_text_inference/Calculates embeddings using the default ELSER 2 endpoint} + issue: https://github.com/elastic/elasticsearch/issues/116542 +- class: 
org.elasticsearch.compute.lucene.LuceneQueryExpressionEvaluatorTests + method: testTermQuery + issue: https://github.com/elastic/elasticsearch/issues/116879 +- class: org.elasticsearch.xpack.inference.InferenceRestIT + issue: https://github.com/elastic/elasticsearch/issues/116899 +- class: org.elasticsearch.xpack.restart.QueryBuilderBWCIT + method: testQueryBuilderBWC {p0=UPGRADED} + issue: https://github.com/elastic/elasticsearch/issues/116989 +- class: org.elasticsearch.upgrades.QueryBuilderBWCIT + method: testQueryBuilderBWC {cluster=UPGRADED} + issue: https://github.com/elastic/elasticsearch/issues/116990 # Examples: # diff --git a/plugins/analysis-icu/build.gradle b/plugins/analysis-icu/build.gradle index 40b12c46c0bf..f9245ed32c32 100644 --- a/plugins/analysis-icu/build.gradle +++ b/plugins/analysis-icu/build.gradle @@ -28,7 +28,7 @@ dependencies { api "com.ibm.icu:icu4j:${versions.icu4j}" } -if (BuildParams.isSnapshotBuild() == false) { +if (buildParams.isSnapshotBuild() == false) { tasks.named("test").configure { systemProperty 'es.index_mode_feature_flag_registered', 'true' } diff --git a/plugins/discovery-azure-classic/build.gradle b/plugins/discovery-azure-classic/build.gradle index 6eb5b574b88f..16786c6c3107 100644 --- a/plugins/discovery-azure-classic/build.gradle +++ b/plugins/discovery-azure-classic/build.gradle @@ -63,7 +63,7 @@ TaskProvider createKey = tasks.register("createKey", LoggedExec) { keystore.parentFile.mkdirs() } outputs.file(keystore).withPropertyName('keystoreFile') - executable = "${BuildParams.runtimeJavaHome}/bin/keytool" + executable = "${buildParams.runtimeJavaHome.get()}/bin/keytool" getStandardInput().set('FirstName LastName\nUnit\nOrganization\nCity\nState\nNL\nyes\n\n') args '-genkey', '-alias', 'test-node', diff --git a/plugins/discovery-ec2/build.gradle b/plugins/discovery-ec2/build.gradle index d9e86315d946..a166a89ad402 100644 --- a/plugins/discovery-ec2/build.gradle +++ b/plugins/discovery-ec2/build.gradle @@ -57,7 +57,7 @@ tasks.register("writeTestJavaPolicy") { throw new GradleException("failed to create temporary directory [${tmp}]") } final File javaPolicy = file("${tmp}/java.policy") - if (BuildParams.inFipsJvm) { + if (buildParams.inFipsJvm) { javaPolicy.write( [ "grant {", @@ -98,7 +98,7 @@ tasks.named("test").configure { // this is needed to manipulate com.amazonaws.sdk.ec2MetadataServiceEndpointOverride system property // it is better rather disable security manager at all with `systemProperty 'tests.security.manager', 'false'` - if (BuildParams.inFipsJvm){ + if (buildParams.inFipsJvm){ nonInputProperties.systemProperty 'java.security.policy', "=file://${buildDir}/tmp/java.policy" } else { nonInputProperties.systemProperty 'java.security.policy', "file://${buildDir}/tmp/java.policy" diff --git a/plugins/discovery-ec2/qa/amazon-ec2/build.gradle b/plugins/discovery-ec2/qa/amazon-ec2/build.gradle index 5cdcdc59cafe..aad59be37626 100644 --- a/plugins/discovery-ec2/qa/amazon-ec2/build.gradle +++ b/plugins/discovery-ec2/qa/amazon-ec2/build.gradle @@ -56,7 +56,7 @@ tasks.named("yamlRestTest").configure { enabled = false } TaskProvider fixture = tasks.register("ec2Fixture${action}", AntFixture) { dependsOn project.sourceSets.yamlRestTest.runtimeClasspath env 'CLASSPATH', "${-> project.sourceSets.yamlRestTest.runtimeClasspath.asPath}" - executable = "${BuildParams.runtimeJavaHome}/bin/java" + executable = "${buildParams.runtimeJavaHome.get()}/bin/java" args 'org.elasticsearch.discovery.ec2.AmazonEC2Fixture', baseDir, 
"${buildDir}/testclusters/yamlRestTest${action}-1/config/unicast_hosts.txt" } diff --git a/plugins/discovery-gce/qa/gce/build.gradle b/plugins/discovery-gce/qa/gce/build.gradle index 14a904e10718..a22678b9a67d 100644 --- a/plugins/discovery-gce/qa/gce/build.gradle +++ b/plugins/discovery-gce/qa/gce/build.gradle @@ -32,7 +32,7 @@ restResources { def gceFixtureProvider = tasks.register("gceFixture", AntFixture) { dependsOn project.sourceSets.yamlRestTest.runtimeClasspath env 'CLASSPATH', "${-> project.sourceSets.yamlRestTest.runtimeClasspath.asPath}" - executable = "${BuildParams.runtimeJavaHome}/bin/java" + executable = "${buildParams.runtimeJavaHome.get()}/bin/java" args 'org.elasticsearch.cloud.gce.GCEFixture', baseDir, "${buildDir}/testclusters/yamlRestTest-1/config/unicast_hosts.txt" } diff --git a/plugins/mapper-annotated-text/build.gradle b/plugins/mapper-annotated-text/build.gradle index d0b116397061..545dfe49bfcf 100644 --- a/plugins/mapper-annotated-text/build.gradle +++ b/plugins/mapper-annotated-text/build.gradle @@ -16,7 +16,7 @@ esplugin { classname 'org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextPlugin' } -if (BuildParams.isSnapshotBuild() == false) { +if (buildParams.isSnapshotBuild() == false) { tasks.named("test").configure { systemProperty 'es.index_mode_feature_flag_registered', 'true' } diff --git a/plugins/mapper-murmur3/build.gradle b/plugins/mapper-murmur3/build.gradle index 0fa710c130a2..e5108814154a 100644 --- a/plugins/mapper-murmur3/build.gradle +++ b/plugins/mapper-murmur3/build.gradle @@ -22,7 +22,7 @@ dependencies { testImplementation project(':modules:lang-painless') } -if (BuildParams.isSnapshotBuild() == false) { +if (buildParams.isSnapshotBuild() == false) { tasks.named("test").configure { systemProperty 'es.index_mode_feature_flag_registered', 'true' } diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index 741542477e44..b7f7816a3a0e 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -84,7 +84,7 @@ tasks.named("dependencyLicenses").configure { tasks.withType(RestIntegTestTask).configureEach { usesDefaultDistribution() - BuildParams.withFipsEnabledOnly(it) + buildParams.withFipsEnabledOnly(it) jvmArgs '--add-exports', 'java.security.jgss/sun.security.krb5=ALL-UNNAMED' } diff --git a/plugins/repository-hdfs/hadoop-client-api/build.gradle b/plugins/repository-hdfs/hadoop-client-api/build.gradle index 4ac6f79530fc..24e4213780fe 100644 --- a/plugins/repository-hdfs/hadoop-client-api/build.gradle +++ b/plugins/repository-hdfs/hadoop-client-api/build.gradle @@ -1,5 +1,5 @@ apply plugin: 'elasticsearch.build' -apply plugin: 'com.github.johnrengelman.shadow' +apply plugin: 'com.gradleup.shadow' dependencies { implementation "org.apache.hadoop:hadoop-client-api:${project.parent.versions.hadoop}" diff --git a/qa/ccs-rolling-upgrade-remote-cluster/build.gradle b/qa/ccs-rolling-upgrade-remote-cluster/build.gradle index 585124f223c9..ce5b840e6dc9 100644 --- a/qa/ccs-rolling-upgrade-remote-cluster/build.gradle +++ b/qa/ccs-rolling-upgrade-remote-cluster/build.gradle @@ -16,7 +16,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.bwc-test' apply plugin: 'elasticsearch.rest-resources' -BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> +buildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> /** * We execute tests 3 times. 
@@ -52,7 +52,7 @@ BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName ->
     nonInputProperties.systemProperty('tests.rest.remote_cluster', remoteCluster.map(c -> c.allHttpSocketURI.join(",")))
   }
 
-  onlyIf("FIPS mode disabled") { BuildParams.inFipsJvm == false }
+  onlyIf("FIPS mode disabled") { buildParams.inFipsJvm == false }
 }
 
 tasks.register("${baseName}#oldClusterTest", StandaloneRestIntegTestTask) {
diff --git a/qa/full-cluster-restart/build.gradle b/qa/full-cluster-restart/build.gradle
index 8d950eea616d..5e68c4d1ad26 100644
--- a/qa/full-cluster-restart/build.gradle
+++ b/qa/full-cluster-restart/build.gradle
@@ -7,14 +7,13 @@
  * License v3.0 only", or the "Server Side Public License, v 1".
  */
 
-import org.elasticsearch.gradle.internal.info.BuildParams
 import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask
 
 apply plugin: 'elasticsearch.internal-java-rest-test'
 apply plugin: 'elasticsearch.internal-test-artifact'
 apply plugin: 'elasticsearch.bwc-test'
 
-BuildParams.bwcVersions.withIndexCompatible { bwcVersion, baseName ->
+buildParams.bwcVersions.withIndexCompatible { bwcVersion, baseName ->
   tasks.register(bwcTaskName(bwcVersion), StandaloneRestIntegTestTask) {
     usesBwcDistribution(bwcVersion)
     systemProperty("tests.old_cluster_version", bwcVersion)
diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartArchivedSettingsIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartArchivedSettingsIT.java
new file mode 100644
index 000000000000..caa57f1e605a
--- /dev/null
+++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartArchivedSettingsIT.java
@@ -0,0 +1,100 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.upgrades;
+
+import io.netty.handler.codec.http.HttpMethod;
+
+import com.carrotsearch.randomizedtesting.annotations.Name;
+
+import org.elasticsearch.client.Request;
+import org.elasticsearch.client.RequestOptions;
+import org.elasticsearch.client.WarningsHandler;
+import org.elasticsearch.core.UpdateForV10;
+import org.elasticsearch.test.cluster.ElasticsearchCluster;
+import org.elasticsearch.test.cluster.FeatureFlag;
+import org.elasticsearch.test.cluster.local.LocalClusterConfigProvider;
+import org.elasticsearch.test.cluster.local.distribution.DistributionType;
+import org.elasticsearch.test.rest.ObjectPath;
+import org.junit.ClassRule;
+import org.junit.rules.RuleChain;
+import org.junit.rules.TemporaryFolder;
+import org.junit.rules.TestRule;
+
+import static org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator.THRESHOLD_SETTING;
+
+/**
+ * Tests to run before and after a full cluster restart. This is run twice,
+ * once with {@code tests.is_old_cluster} set to {@code true} against a cluster
+ * of an older version. The cluster is shut down and a cluster of the new
+ * version is started with the same data directories and then this is rerun
+ * with {@code tests.is_old_cluster} set to {@code false}.
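+ * In the archived-settings check below, the old cluster persists a {@code cluster.routing.allocation.balance.threshold}
+ * value that the new version no longer accepts; after the restart that value is expected to reappear under the
+ * {@code persistent.archived.} prefix rather than being applied or dropped silently.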
+ */ +public class FullClusterRestartArchivedSettingsIT extends ParameterizedFullClusterRestartTestCase { + + private static TemporaryFolder repoDirectory = new TemporaryFolder(); + + protected static LocalClusterConfigProvider clusterConfig = c -> {}; + + private static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .version(getOldClusterTestVersion()) + .nodes(2) + .setting("path.repo", () -> repoDirectory.getRoot().getPath()) + .setting("xpack.security.enabled", "false") + // some tests rely on the translog not being flushed + .setting("indices.memory.shard_inactive_time", "60m") + .apply(() -> clusterConfig) + .feature(FeatureFlag.TIME_SERIES_MODE) + .feature(FeatureFlag.FAILURE_STORE_ENABLED) + .build(); + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(repoDirectory).around(cluster); + + public FullClusterRestartArchivedSettingsIT(@Name("cluster") FullClusterRestartUpgradeStatus upgradeStatus) { + super(upgradeStatus); + } + + @Override + protected ElasticsearchCluster getUpgradeCluster() { + return cluster; + } + + @UpdateForV10(owner = UpdateForV10.Owner.DISTRIBUTED_COORDINATION) // this test is just about v8->v9 upgrades, remove it in v10 + public void testBalancedShardsAllocatorThreshold() throws Exception { + assumeTrue("test only applies for v8->v9 upgrades", getOldClusterTestVersion().getMajor() == 8); + + final var chosenValue = randomFrom("0", "0.1", "0.5", "0.999"); + + if (isRunningAgainstOldCluster()) { + final var request = newXContentRequest( + HttpMethod.PUT, + "/_cluster/settings", + (builder, params) -> builder.startObject("persistent").field(THRESHOLD_SETTING.getKey(), chosenValue).endObject() + ); + request.setOptions(RequestOptions.DEFAULT.toBuilder().setWarningsHandler(WarningsHandler.PERMISSIVE)); + assertOK(client().performRequest(request)); + } + + final var clusterSettingsResponse = ObjectPath.createFromResponse( + client().performRequest(new Request("GET", "/_cluster/settings")) + ); + + final var settingsPath = "persistent." + THRESHOLD_SETTING.getKey(); + final var settingValue = clusterSettingsResponse.evaluate(settingsPath); + + if (isRunningAgainstOldCluster()) { + assertEquals(chosenValue, settingValue); + } else { + assertNull(settingValue); + assertNotNull(clusterSettingsResponse.evaluate("persistent.archived." 
+ THRESHOLD_SETTING.getKey())); + } + } +} diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java index daadf936ae84..26e4f3146da2 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java @@ -16,11 +16,9 @@ import org.apache.http.util.EntityUtils; import org.elasticsearch.Build; import org.elasticsearch.client.Request; -import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; -import org.elasticsearch.client.WarningsHandler; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.MetadataIndexStateService; import org.elasticsearch.common.Strings; @@ -29,7 +27,6 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Booleans; import org.elasticsearch.core.CheckedFunction; -import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; @@ -75,7 +72,6 @@ import static java.util.stream.Collectors.toList; import static org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.SYSTEM_INDEX_ENFORCEMENT_INDEX_VERSION; import static org.elasticsearch.cluster.routing.UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING; -import static org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator.THRESHOLD_SETTING; import static org.elasticsearch.cluster.routing.allocation.decider.MaxRetryAllocationDecider.SETTING_ALLOCATION_MAX_RETRY; import static org.elasticsearch.test.MapMatcher.assertMap; import static org.elasticsearch.test.MapMatcher.matchesMap; @@ -1959,35 +1955,4 @@ public static void assertNumHits(String index, int numHits, int totalShards) thr assertThat(XContentMapValues.extractValue("_shards.successful", resp), equalTo(totalShards)); assertThat(extractTotalHits(resp), equalTo(numHits)); } - - @UpdateForV10(owner = UpdateForV10.Owner.DISTRIBUTED_COORDINATION) // this test is just about v8->v9 upgrades, remove it in v10 - public void testBalancedShardsAllocatorThreshold() throws Exception { - assumeTrue("test only applies for v8->v9 upgrades", getOldClusterTestVersion().getMajor() == 8); - - final var chosenValue = randomFrom("0", "0.1", "0.5", "0.999"); - - if (isRunningAgainstOldCluster()) { - final var request = newXContentRequest( - HttpMethod.PUT, - "/_cluster/settings", - (builder, params) -> builder.startObject("persistent").field(THRESHOLD_SETTING.getKey(), chosenValue).endObject() - ); - request.setOptions(RequestOptions.DEFAULT.toBuilder().setWarningsHandler(WarningsHandler.PERMISSIVE)); - assertOK(client().performRequest(request)); - } - - final var clusterSettingsResponse = ObjectPath.createFromResponse( - client().performRequest(new Request("GET", "/_cluster/settings")) - ); - - final var settingsPath = "persistent." + THRESHOLD_SETTING.getKey(); - final var settingValue = clusterSettingsResponse.evaluate(settingsPath); - - if (isRunningAgainstOldCluster()) { - assertEquals(chosenValue, settingValue); - } else { - assertNull(settingValue); - assertNotNull(clusterSettingsResponse.evaluate("persistent.archived." 
+ THRESHOLD_SETTING.getKey())); - } - } } diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java index 9ca420efe115..aac2c661dea9 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java @@ -12,8 +12,6 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; -import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.common.Strings; @@ -23,7 +21,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Fuzziness; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.ConstantScoreQueryBuilder; import org.elasticsearch.index.query.DisMaxQueryBuilder; @@ -43,7 +40,6 @@ import org.elasticsearch.test.cluster.FeatureFlag; import org.elasticsearch.test.cluster.local.LocalClusterConfigProvider; import org.elasticsearch.test.cluster.local.distribution.DistributionType; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.xcontent.XContentBuilder; import org.junit.ClassRule; @@ -249,23 +245,10 @@ public void testQueryBuilderBWC() throws Exception { InputStream in = new ByteArrayInputStream(qbSource, 0, qbSource.length); StreamInput input = new NamedWriteableAwareStreamInput(new InputStreamStreamInput(in), registry) ) { - - @UpdateForV9(owner = UpdateForV9.Owner.SEARCH_FOUNDATIONS) // condition will always be true - var originalClusterHasTransportVersion = oldClusterHasFeature(RestTestLegacyFeatures.TRANSPORT_VERSION_SUPPORTED); - final TransportVersion transportVersion; - if (originalClusterHasTransportVersion == false) { - transportVersion = TransportVersion.fromId( - parseLegacyVersion(getOldClusterVersion()).map(Version::id).orElse(TransportVersions.MINIMUM_COMPATIBLE.id()) - ); - } else { - transportVersion = TransportVersion.readVersion(input); - } - - input.setTransportVersion(transportVersion); + input.setTransportVersion(TransportVersion.readVersion(input)); QueryBuilder queryBuilder = input.readNamedWriteable(QueryBuilder.class); assert in.read() == -1; assertEquals(expectedQueryBuilder, queryBuilder); - } } } diff --git a/qa/mixed-cluster/build.gradle b/qa/mixed-cluster/build.gradle index f3fd57f3fc8a..f6549a2d83fe 100644 --- a/qa/mixed-cluster/build.gradle +++ b/qa/mixed-cluster/build.gradle @@ -10,7 +10,6 @@ import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.VersionProperties -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-testclusters' @@ -64,8 +63,7 @@ excludeList.add('indices.resolve_index/20_resolve_system_index/*') // Excluded because the error has changed excludeList.add('aggregations/percentiles_hdr_metric/Negative values test') -BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> - +buildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> if (bwcVersion != VersionProperties.getElasticsearchVersion()) { /* This project runs the core REST 
tests against a 4 node cluster where two of the nodes has a different minor. */ diff --git a/qa/multi-cluster-search/build.gradle b/qa/multi-cluster-search/build.gradle index 146acedd164b..906a49134bb5 100644 --- a/qa/multi-cluster-search/build.gradle +++ b/qa/multi-cluster-search/build.gradle @@ -35,7 +35,7 @@ def ccsSupportedVersion = bwcVersion -> { return currentVersion.minor == 0 || (currentVersion.major == bwcVersion.major && currentVersion.minor - bwcVersion.minor <= 1) } -BuildParams.bwcVersions.withWireCompatible(ccsSupportedVersion) { bwcVersion, baseName -> +buildParams.bwcVersions.withWireCompatible(ccsSupportedVersion) { bwcVersion, baseName -> def remoteCluster = testClusters.register("${baseName}-remote") { numberOfNodes = 2 diff --git a/qa/repository-multi-version/build.gradle b/qa/repository-multi-version/build.gradle index 17888efaa2b4..79a8be4c1be2 100644 --- a/qa/repository-multi-version/build.gradle +++ b/qa/repository-multi-version/build.gradle @@ -16,7 +16,7 @@ apply plugin: 'elasticsearch.internal-test-artifact' apply plugin: 'elasticsearch.bwc-test' -BuildParams.bwcVersions.withIndexCompatible { bwcVersion, baseName -> +buildParams.bwcVersions.withIndexCompatible { bwcVersion, baseName -> String oldClusterName = "${baseName}-old" String newClusterName = "${baseName}-new" diff --git a/qa/rolling-upgrade-legacy/build.gradle b/qa/rolling-upgrade-legacy/build.gradle index 4ebb3888e9f2..e1c31fd50c0d 100644 --- a/qa/rolling-upgrade-legacy/build.gradle +++ b/qa/rolling-upgrade-legacy/build.gradle @@ -10,7 +10,6 @@ import org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-testclusters' @@ -18,7 +17,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.bwc-test' apply plugin: 'elasticsearch.rest-resources' -BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> +buildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> /* * NOTE: This module is for the tests that were problematic when converting :qa:rolling-upgrade to the junit-based bwc test definition * Over time, these should be migrated into the :qa:rolling-upgrade module and fixed properly diff --git a/qa/rolling-upgrade/build.gradle b/qa/rolling-upgrade/build.gradle index ef31f6421c18..2f717f201f24 100644 --- a/qa/rolling-upgrade/build.gradle +++ b/qa/rolling-upgrade/build.gradle @@ -18,7 +18,7 @@ testArtifacts { registerTestArtifactFromSourceSet(sourceSets.javaRestTest) } -BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> +buildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> tasks.register(bwcTaskName(bwcVersion), StandaloneRestIntegTestTask) { usesBwcDistribution(bwcVersion) systemProperty("tests.old_cluster_version", bwcVersion) diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java deleted file mode 100644 index 2ed1b7fe9e79..000000000000 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.upgrades; - -import com.carrotsearch.randomizedtesting.annotations.Name; - -import org.apache.http.util.EntityUtils; -import org.elasticsearch.client.Request; -import org.elasticsearch.client.Response; -import org.hamcrest.Matchers; - -import java.nio.charset.StandardCharsets; -import java.util.Map; - -import static org.hamcrest.CoreMatchers.equalTo; - -public class HealthNodeUpgradeIT extends AbstractRollingUpgradeTestCase { - - public HealthNodeUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { - super(upgradedNodes); - } - - public void testHealthNode() throws Exception { - if (clusterHasFeature("health.supports_health")) { - assertBusy(() -> { - Response response = client().performRequest(new Request("GET", "_cat/tasks")); - String tasks = EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8); - assertThat(tasks, Matchers.containsString("health-node")); - }); - assertBusy(() -> { - String path = clusterHasFeature("health.supports_health_report_api") ? "_health_report" : "_internal/_health"; - Response response = client().performRequest(new Request("GET", path)); - Map health_report = entityAsMap(response.getEntity()); - assertThat(health_report.get("status"), equalTo("green")); - }); - } - } -} diff --git a/qa/smoke-test-plugins/build.gradle b/qa/smoke-test-plugins/build.gradle index af4e55a709a6..c707c2b5e8c8 100644 --- a/qa/smoke-test-plugins/build.gradle +++ b/qa/smoke-test-plugins/build.gradle @@ -8,7 +8,6 @@ */ import org.apache.tools.ant.filters.ReplaceTokens -import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.legacy-yaml-rest-test' diff --git a/qa/verify-version-constants/build.gradle b/qa/verify-version-constants/build.gradle index f74ee7c59b26..ee29da53dc51 100644 --- a/qa/verify-version-constants/build.gradle +++ b/qa/verify-version-constants/build.gradle @@ -19,7 +19,7 @@ dependencies { testImplementation project(':modules:rest-root') } -BuildParams.bwcVersions.withIndexCompatible { bwcVersion, baseName -> +buildParams.bwcVersions.withIndexCompatible { bwcVersion, baseName -> def baseCluster = testClusters.register(baseName) { version = bwcVersion.toString() setting 'xpack.security.enabled', 'true' diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.delete.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.delete.json index 745136848786..cb4eee007a24 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.delete.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.delete.json @@ -4,7 +4,7 @@ "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/delete-inference-api.html", "description": "Delete an inference endpoint" }, - "stability": "experimental", + "stability": "stable", "visibility": "public", "headers": { "accept": [ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.get.json index 7b7aa0f56fcb..14e7519c3796 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.get.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.get.json 
@@ -4,7 +4,7 @@ "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/get-inference-api.html", "description":"Get an inference endpoint" }, - "stability":"experimental", + "stability":"stable", "visibility":"public", "headers":{ "accept": [ "application/json"] diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.inference.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.inference.json index 3195476ce1e9..eb4c1268c28c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.inference.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.inference.json @@ -4,7 +4,7 @@ "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/post-inference-api.html", "description":"Perform inference" }, - "stability":"experimental", + "stability":"stable", "visibility":"public", "headers":{ "accept": [ "application/json"], diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put.json index 9ff5ff4b80c5..411392fe3990 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put.json @@ -4,7 +4,7 @@ "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/put-inference-api.html", "description":"Configure an inference endpoint for use in the Inference API" }, - "stability":"experimental", + "stability":"stable", "visibility":"public", "headers":{ "accept": [ "application/json"], diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.stream_inference.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.stream_inference.json index 32b4b2f31183..493306e10d5c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.stream_inference.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.stream_inference.json @@ -4,7 +4,7 @@ "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/post-stream-inference-api.html", "description":"Perform streaming inference" }, - "stability":"experimental", + "stability":"stable", "visibility":"public", "headers":{ "accept": [ "text/event-stream"], diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/90_unsupported_operations.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/90_unsupported_operations.yml index db718959919d..54b2bf59c8dd 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/90_unsupported_operations.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/90_unsupported_operations.yml @@ -129,7 +129,7 @@ noop update: {} --- -update: +regular update: - requires: cluster_features: ["gte_v8.2.0"] reason: tsdb indexing changed in 8.2.0 diff --git a/server/build.gradle b/server/build.gradle index ef64b0746dfc..bc8decfa8bab 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -133,7 +133,7 @@ def generatePluginsList = tasks.register("generatePluginsList") { sourceSets.main.output.dir(generatedResourcesDir) sourceSets.main.compiledBy(generateModulesList, generatePluginsList) -if (BuildParams.isSnapshotBuild() == false) { +if (buildParams.isSnapshotBuild() == false) { tasks.named("test").configure { systemProperty 'es.index_mode_feature_flag_registered', 'true' systemProperty 'es.failure_store_feature_flag_enabled', 'true' diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerMetricsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerMetricsIT.java index 9a71bf86388a..b3ec4a533118 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerMetricsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerMetricsIT.java @@ -9,6 +9,7 @@ package org.elasticsearch.cluster.routing.allocation.allocator; +import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteUtils; import org.elasticsearch.cluster.ClusterInfoService; import org.elasticsearch.cluster.ClusterInfoServiceUtils; import org.elasticsearch.cluster.InternalClusterInfoService; @@ -68,6 +69,7 @@ public void testDesiredBalanceMetrics() { final var infoService = (InternalClusterInfoService) internalCluster().getCurrentMasterNodeInstance(ClusterInfoService.class); ClusterInfoServiceUtils.setUpdateFrequency(infoService, TimeValue.timeValueMillis(200)); assertNotNull("info should not be null", ClusterInfoServiceUtils.refresh(infoService)); + ClusterRerouteUtils.reroute(client()); // ensure we leverage the latest cluster info final var telemetryPlugin = getTelemetryPlugin(internalCluster().getMasterName()); telemetryPlugin.collect(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/recovery/TruncatedRecoveryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/recovery/TruncatedRecoveryIT.java index 039a596f53b3..38eef4f72062 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/recovery/TruncatedRecoveryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/recovery/TruncatedRecoveryIT.java @@ -19,14 +19,19 @@ import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.recovery.PeerRecoveryTargetService; import org.elasticsearch.indices.recovery.RecoveryFileChunkRequest; +import org.elasticsearch.indices.recovery.RecoveryFilesInfoRequest; import org.elasticsearch.node.RecoverySettingsChunkSizePlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.transport.TransportService; +import java.nio.file.Files; +import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -34,6 +39,7 @@ import java.util.List; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Function; import static org.elasticsearch.node.RecoverySettingsChunkSizePlugin.CHUNK_SIZE_SETTING; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -72,16 +78,14 @@ public void testCancelRecoveryAndResume() throws Exception { // we use 2 nodes a lucky and unlucky one // the lucky one holds the primary // the unlucky one gets the replica and the truncated leftovers - NodeStats primariesNode = dataNodeStats.get(0); - NodeStats unluckyNode = dataNodeStats.get(1); + String primariesNode = dataNodeStats.get(0).getNode().getName(); + String unluckyNode = 
dataNodeStats.get(1).getNode().getName();
 
         // create the index and prevent allocation on any other nodes than the lucky one
         // we have no replicas so far and make sure that we allocate the primary on the lucky node
         assertAcked(
             prepareCreate("test").setMapping("field1", "type=text", "the_id", "type=text")
-                .setSettings(
-                    indexSettings(numberOfShards(), 0).put("index.routing.allocation.include._name", primariesNode.getNode().getName())
-                )
+                .setSettings(indexSettings(numberOfShards(), 0).put("index.routing.allocation.include._name", primariesNode))
         ); // only allocate on the lucky node
 
         // index some docs and check if they are coming back
@@ -102,20 +106,54 @@ public void testCancelRecoveryAndResume() throws Exception {
         indicesAdmin().prepareFlush().setForce(true).get(); // double flush to create safe commit in case of async durability
         indicesAdmin().prepareForceMerge().setMaxNumSegments(1).setFlush(true).get();
 
+        // We write some garbage into the shard directory so that we can verify that it is cleaned up before we resend.
+        // Cleanup helps prevent recovery from failing due to lack of space from garbage left over from a previous
+        // recovery that crashed during file transmission. #104473
+        // We can't look for the presence of the recovery temp files themselves because they are automatically
+        // cleaned up on clean shutdown by MultiFileWriter.
+        final String GARBAGE_PREFIX = "recovery.garbage.";
+
         final CountDownLatch latch = new CountDownLatch(1);
         final AtomicBoolean truncate = new AtomicBoolean(true);
+
+        IndicesService unluckyIndices = internalCluster().getInstance(IndicesService.class, unluckyNode);
+        Function<ShardId, Path> getUnluckyIndexPath = (shardId) -> unluckyIndices.indexService(shardId.getIndex())
+            .getShard(shardId.getId())
+            .shardPath()
+            .resolveIndex();
+
         for (NodeStats dataNode : dataNodeStats) {
             MockTransportService.getInstance(dataNode.getNode().getName())
                 .addSendBehavior(
-                    internalCluster().getInstance(TransportService.class, unluckyNode.getNode().getName()),
+                    internalCluster().getInstance(TransportService.class, unluckyNode),
                     (connection, requestId, action, request, options) -> {
                         if (action.equals(PeerRecoveryTargetService.Actions.FILE_CHUNK)) {
                             RecoveryFileChunkRequest req = (RecoveryFileChunkRequest) request;
                             logger.info("file chunk [{}] lastChunk: {}", req, req.lastChunk());
+                            // During the first recovery attempt (when truncate is set), write an extra garbage file once for each
+                            // file transmitted. We get multiple chunks per file but only one is the last.
+                            if (truncate.get() && req.lastChunk()) {
+                                final var shardPath = getUnluckyIndexPath.apply(req.shardId());
+                                final var garbagePath = Files.createTempFile(shardPath, GARBAGE_PREFIX, null);
+                                logger.info("writing garbage at: {}", garbagePath);
+                            }
                             if ((req.name().endsWith("cfs") || req.name().endsWith("fdt")) && req.lastChunk() && truncate.get()) {
                                 latch.countDown();
                                 throw new RuntimeException("Caused some truncated files for fun and profit");
                             }
+                        } else if (action.equals(PeerRecoveryTargetService.Actions.FILES_INFO)) {
+                            // verify there are no garbage files present at the FILES_INFO stage of recovery. This precedes FILES_CHUNKS
+                            // and so will run before garbage has been introduced on the first attempt, and before post-transfer cleanup
+                            // has been performed on the second.
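+                            // Note: Path#startsWith compares whole path components rather than string prefixes, so the
+                            // file name must be converted to a String before the prefix check against GARBAGE_PREFIX below.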
+                            final var shardPath = getUnluckyIndexPath.apply(((RecoveryFilesInfoRequest) request).shardId());
+                            try (var list = Files.list(shardPath).filter(path -> path.getFileName().toString().startsWith(GARBAGE_PREFIX))) {
+                                final var garbageFiles = list.toArray();
+                                assertArrayEquals(
+                                    "garbage files should have been cleaned before file transmission",
+                                    new Path[0],
+                                    garbageFiles
+                                );
+                            }
                         }
                         connection.sendRequest(requestId, action, request, options);
                     }
@@ -128,14 +166,14 @@ public void testCancelRecoveryAndResume() throws Exception {
                 .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1)
                 .put(
                     "index.routing.allocation.include._name", // now allow allocation on all nodes
-                    primariesNode.getNode().getName() + "," + unluckyNode.getNode().getName()
+                    primariesNode + "," + unluckyNode
                 ),
             "test"
         );
 
         latch.await();
 
-        // at this point we got some truncated left overs on the replica on the unlucky node
+        // at this point we got some truncated leftovers on the replica on the unlucky node
         // now we are allowing the recovery to allocate again and finish to see if we wipe the truncated files
         truncate.compareAndSet(true, false);
         ensureGreen("test");
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotShutdownIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotShutdownIT.java
index 980ef2a87c9c..e5e641bfdda2 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotShutdownIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotShutdownIT.java
@@ -109,6 +109,7 @@ public void testRestartNodeDuringSnapshot() throws Exception {
         final var clusterService = internalCluster().getCurrentMasterNodeInstance(ClusterService.class);
         final var snapshotFuture = startFullSnapshotBlockedOnDataNode(randomIdentifier(), repoName, originalNode);
+        safeAwait((ActionListener<Void> l) -> flushMasterQueue(clusterService, l));
         final var snapshotCompletesWithoutPausingListener = ClusterServiceUtils.addTemporaryStateListener(clusterService, state -> {
             final var entriesForRepo = SnapshotsInProgress.get(state).forRepo(repoName);
             if (entriesForRepo.isEmpty()) {
diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java
index 3b3b06a1c692..35d1a44624b0 100644
--- a/server/src/main/java/module-info.java
+++ b/server/src/main/java/module-info.java
@@ -419,13 +419,12 @@ provides org.elasticsearch.features.FeatureSpecification
         with
+            org.elasticsearch.action.admin.indices.stats.IndicesStatsFeatures,
             org.elasticsearch.action.bulk.BulkFeatures,
             org.elasticsearch.features.FeatureInfrastructureFeatures,
             org.elasticsearch.health.HealthFeatures,
-            org.elasticsearch.cluster.service.TransportFeatures,
             org.elasticsearch.cluster.metadata.MetadataFeatures,
             org.elasticsearch.rest.RestFeatures,
-            org.elasticsearch.indices.IndicesFeatures,
             org.elasticsearch.repositories.RepositoriesFeatures,
             org.elasticsearch.action.admin.cluster.allocation.AllocationStatsFeatures,
             org.elasticsearch.rest.action.admin.cluster.ClusterRerouteFeatures,
diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java
index b7da6115a1a4..a1fb24186106 100644
--- a/server/src/main/java/org/elasticsearch/TransportVersions.java
+++ b/server/src/main/java/org/elasticsearch/TransportVersions.java
@@ -104,6 +104,7 @@ static TransportVersion def(int id) {
     public static final TransportVersion V_8_14_0 = def(8_636_00_1);
     public static final TransportVersion V_8_15_0 = def(8_702_00_2);
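     // The final digit of these ids is a patch/backport slot: V_8_15_0 = def(8_702_00_2) and V_8_15_2 = def(8_702_00_3)
     // share the 8_702 base, so the 8.15 backport added below takes the next free slot, def(8_702_00_4). Wire code can
     // then gate on the constant as usual, e.g. out.getTransportVersion().onOrAfter(QUERY_RULES_LIST_INCLUDES_TYPES_BACKPORT_8_15),
     // without renumbering anything that follows.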
public static final TransportVersion V_8_15_2 = def(8_702_00_3); + public static final TransportVersion QUERY_RULES_LIST_INCLUDES_TYPES_BACKPORT_8_15 = def(8_702_00_4); public static final TransportVersion ML_INFERENCE_DONT_DELETE_WHEN_SEMANTIC_TEXT_EXISTS = def(8_703_00_0); public static final TransportVersion INFERENCE_ADAPTIVE_ALLOCATIONS = def(8_704_00_0); public static final TransportVersion INDEX_REQUEST_UPDATE_BY_SCRIPT_ORIGIN = def(8_705_00_0); @@ -177,6 +178,7 @@ static TransportVersion def(int id) { public static final TransportVersion INFERENCE_DONT_PERSIST_ON_READ_BACKPORT_8_16 = def(8_772_00_1); public static final TransportVersion ADD_COMPATIBILITY_VERSIONS_TO_NODE_INFO_BACKPORT_8_16 = def(8_772_00_2); public static final TransportVersion SKIP_INNER_HITS_SEARCH_SOURCE_BACKPORT_8_16 = def(8_772_00_3); + public static final TransportVersion QUERY_RULES_LIST_INCLUDES_TYPES_BACKPORT_8_16 = def(8_772_00_4); public static final TransportVersion REMOVE_MIN_COMPATIBLE_SHARD_NODE = def(8_773_00_0); public static final TransportVersion REVERT_REMOVE_MIN_COMPATIBLE_SHARD_NODE = def(8_774_00_0); public static final TransportVersion ESQL_FIELD_ATTRIBUTE_PARENT_SIMPLIFIED = def(8_775_00_0); @@ -197,6 +199,9 @@ static TransportVersion def(int id) { public static final TransportVersion VERTEX_AI_INPUT_TYPE_ADDED = def(8_790_00_0); public static final TransportVersion SKIP_INNER_HITS_SEARCH_SOURCE = def(8_791_00_0); public static final TransportVersion QUERY_RULES_LIST_INCLUDES_TYPES = def(8_792_00_0); + public static final TransportVersion INDEX_STATS_ADDITIONAL_FIELDS = def(8_793_00_0); + public static final TransportVersion INDEX_STATS_ADDITIONAL_FIELDS_REVERT = def(8_794_00_0); + public static final TransportVersion FAST_REFRESH_RCO_2 = def(8_795_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index 9f727f49530a..98d6284fd91d 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -853,7 +853,7 @@ public void initRestHandlers(Supplier nodesInCluster, Predicate< registerHandler.accept(new RestClusterStateAction(settingsFilter, threadPool)); registerHandler.accept(new RestClusterHealthAction()); registerHandler.accept(new RestClusterUpdateSettingsAction()); - registerHandler.accept(new RestClusterGetSettingsAction(settings, clusterSettings, settingsFilter, clusterSupportsFeature)); + registerHandler.accept(new RestClusterGetSettingsAction(settings, clusterSettings, settingsFilter)); registerHandler.accept(new RestClusterRerouteAction(settingsFilter)); registerHandler.accept(new RestClusterSearchShardsAction()); registerHandler.accept(new RestPendingClusterTasksAction()); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java index 7857e9a22e9b..cb667400240f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java @@ -23,7 +23,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.injection.guice.Inject; @@ -120,27 +119,18 @@ public void onPrimaryOperationComplete( ActionListener listener ) { assert replicaRequest.primaryRefreshResult.refreshed() : "primary has not refreshed"; - boolean fastRefresh = IndexSettings.INDEX_FAST_REFRESH_SETTING.get( - clusterService.state().metadata().index(indexShardRoutingTable.shardId().getIndex()).getSettings() + UnpromotableShardRefreshRequest unpromotableReplicaRequest = new UnpromotableShardRefreshRequest( + indexShardRoutingTable, + replicaRequest.primaryRefreshResult.primaryTerm(), + replicaRequest.primaryRefreshResult.generation(), + false + ); + transportService.sendRequest( + transportService.getLocalNode(), + TransportUnpromotableShardRefreshAction.NAME, + unpromotableReplicaRequest, + new ActionListenerResponseHandler<>(listener.safeMap(r -> null), in -> ActionResponse.Empty.INSTANCE, refreshExecutor) ); - - // Indices marked with fast refresh do not rely on refreshing the unpromotables - if (fastRefresh) { - listener.onResponse(null); - } else { - UnpromotableShardRefreshRequest unpromotableReplicaRequest = new UnpromotableShardRefreshRequest( - indexShardRoutingTable, - replicaRequest.primaryRefreshResult.primaryTerm(), - replicaRequest.primaryRefreshResult.generation(), - false - ); - transportService.sendRequest( - transportService.getLocalNode(), - TransportUnpromotableShardRefreshAction.NAME, - unpromotableReplicaRequest, - new ActionListenerResponseHandler<>(listener.safeMap(r -> null), in -> ActionResponse.Empty.INSTANCE, refreshExecutor) - ); - } } } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshAction.java 
b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshAction.java index 6c24ec2d1760..4458c008babc 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshAction.java @@ -24,6 +24,9 @@ import java.util.List; +import static org.elasticsearch.TransportVersions.FAST_REFRESH_RCO_2; +import static org.elasticsearch.index.IndexSettings.INDEX_FAST_REFRESH_SETTING; + public class TransportUnpromotableShardRefreshAction extends TransportBroadcastUnpromotableAction< UnpromotableShardRefreshRequest, ActionResponse.Empty> { @@ -73,6 +76,18 @@ protected void unpromotableShardOperation( return; } + // During an upgrade to FAST_REFRESH_RCO_2, we expect search shards to be first upgraded before the primary is upgraded. Thus, + // when the primary is upgraded, and starts to deliver unpromotable refreshes, we expect the search shards to be upgraded already. + // Note that the fast refresh setting is final. + // TODO: remove assertion (ES-9563) + assert INDEX_FAST_REFRESH_SETTING.get(shard.indexSettings().getSettings()) == false + || transportService.getLocalNodeConnection().getTransportVersion().onOrAfter(FAST_REFRESH_RCO_2) + : "attempted to refresh a fast refresh search shard " + + shard + + " on transport version " + + transportService.getLocalNodeConnection().getTransportVersion() + + " (before FAST_REFRESH_RCO_2)"; + ActionListener.run(responseListener, listener -> { shard.waitForPrimaryTermAndGeneration( request.getPrimaryTerm(), diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndexStats.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndexStats.java index 6106e620521f..5bdecd10075e 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndexStats.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndexStats.java @@ -12,6 +12,7 @@ import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.core.Nullable; +import org.elasticsearch.features.NodeFeature; import java.util.ArrayList; import java.util.HashMap; @@ -21,6 +22,9 @@ public class IndexStats implements Iterable { + // feature was effectively reverted but we still need to keep this constant around + public static final NodeFeature REVERTED_TIER_CREATION_DATE = new NodeFeature("stats.tier_creation_date"); + private final String index; private final String uuid; diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesFeatures.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsFeatures.java similarity index 67% rename from server/src/main/java/org/elasticsearch/indices/IndicesFeatures.java rename to server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsFeatures.java index bd39d125969c..558343db1023 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesFeatures.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsFeatures.java @@ -7,17 +7,17 @@ * License v3.0 only", or the "Server Side Public License, v 1". 
*/ -package org.elasticsearch.indices; +package org.elasticsearch.action.admin.indices.stats; -import org.elasticsearch.Version; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; -import java.util.Map; +import java.util.Set; + +public class IndicesStatsFeatures implements FeatureSpecification { -public class IndicesFeatures implements FeatureSpecification { @Override - public Map<NodeFeature, Version> getHistoricalFeatures() { - return Map.of(IndicesService.SUPPORTS_AUTO_PUT, Version.V_8_8_0); + public Set<NodeFeature> getFeatures() { + return Set.of(IndexStats.REVERTED_TIER_CREATION_DATE); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java index 65f7e1969ea6..91e0e7cbc1df 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java @@ -54,7 +54,17 @@ public class IndicesStatsResponse extends ChunkedBroadcastResponse { IndicesStatsResponse(StreamInput in) throws IOException { super(in); shards = in.readArray(ShardStats::new, ShardStats[]::new); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_1_0)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.INDEX_STATS_ADDITIONAL_FIELDS_REVERT)) { + indexHealthMap = in.readMap(ClusterHealthStatus::readFrom); + indexStateMap = in.readMap(IndexMetadata.State::readFrom); + } else if (in.getTransportVersion().onOrAfter(TransportVersions.INDEX_STATS_ADDITIONAL_FIELDS)) { + indexHealthMap = in.readMap(ClusterHealthStatus::readFrom); + indexStateMap = in.readMap(IndexMetadata.State::readFrom); + in.readMap(StreamInput::readStringCollectionAsList); // unused, reverted + in.readMap(StreamInput::readLong); // unused, reverted + } else if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_1_0)) { + // Between 8.1 and INDEX_STATS_ADDITIONAL_FIELDS, we had a different format for the response + // where we only had health and state available.
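+ // The INDEX_STATS_ADDITIONAL_FIELDS branch above still reads (and discards) the two reverted maps so
+ // that the stream stays aligned with peers on the short-lived versions that shipped the extra fields.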
indexHealthMap = in.readMap(ClusterHealthStatus::readFrom); indexStateMap = in.readMap(IndexMetadata.State::readFrom); } else { @@ -174,7 +184,15 @@ public CommonStats getPrimaries() { public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeArray(shards); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_1_0)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.INDEX_STATS_ADDITIONAL_FIELDS_REVERT)) { + out.writeMap(indexHealthMap, StreamOutput::writeWriteable); + out.writeMap(indexStateMap, StreamOutput::writeWriteable); + } else if (out.getTransportVersion().onOrAfter(TransportVersions.INDEX_STATS_ADDITIONAL_FIELDS)) { + out.writeMap(indexHealthMap, StreamOutput::writeWriteable); + out.writeMap(indexStateMap, StreamOutput::writeWriteable); + out.writeMap(Map.of(), StreamOutput::writeStringCollection); + out.writeMap(Map.of(), StreamOutput::writeLong); + } else if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_1_0)) { out.writeMap(indexHealthMap, StreamOutput::writeWriteable); out.writeMap(indexStateMap, StreamOutput::writeWriteable); } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java index ce3e18914945..ad1fda2534fa 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java @@ -42,6 +42,7 @@ import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.Index; @@ -543,7 +544,8 @@ private IndexDocFailureStoreStatus processFailure(BulkItemRequest bulkItemReques var isFailureStoreRequest = isFailureStoreRequest(docWriteRequest); if (isFailureStoreRequest == false && failureStoreCandidate.isFailureStoreEnabled() - && error instanceof VersionConflictEngineException == false) { + && error instanceof VersionConflictEngineException == false + && error instanceof EsRejectedExecutionException == false) { // Prepare the data stream failure store if necessary maybeMarkFailureStoreForRollover(failureStoreCandidate); @@ -563,8 +565,8 @@ private IndexDocFailureStoreStatus processFailure(BulkItemRequest bulkItemReques } } else { // If we can't redirect to a failure store (because either the data stream doesn't have the failure store enabled - // or this request was already targeting a failure store), or this was a version conflict we increment the - // rejected counter. + // or this request was already targeting a failure store), or this was an error that is not eligible for the failure store, + // such as a version conflict or a load rejection, we increment the rejected counter.
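+ // Load rejections are excluded above on the assumption that redirecting them to the failure store would
+ // add further indexing load to an already-overloaded node, and that, as with version conflicts, the
+ // client can simply retry the rejected items.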
failureStoreMetrics.incrementRejected( bulkItemRequest.index(), errorType, diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java index 9e535344c958..fb4b3907d2bf 100644 --- a/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java @@ -126,12 +126,10 @@ protected void asyncShardOperation(GetRequest request, ShardId shardId, ActionLi IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex()); IndexShard indexShard = indexService.getShard(shardId.id()); if (indexShard.routingEntry().isPromotableToPrimary() == false) { - // TODO: Re-evaluate assertion (ES-8227) - // assert indexShard.indexSettings().isFastRefresh() == false - // : "a search shard should not receive a TransportGetAction for an index with fast refresh"; handleGetOnUnpromotableShard(request, indexShard, listener); return; } + // TODO: adapt assertion to assert only that it is not stateless (ES-9563) assert DiscoveryNode.isStateless(clusterService.getSettings()) == false || indexShard.indexSettings().isFastRefresh() : "in Stateless a promotable to primary shard can receive a TransportGetAction only if an index has the fast refresh setting"; if (request.realtime()) { // we are not tied to a refresh cycle here anyway diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java index 34b3ae50e0b5..633e7ef6793a 100644 --- a/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java @@ -124,12 +124,10 @@ protected void asyncShardOperation(MultiGetShardRequest request, ShardId shardId IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex()); IndexShard indexShard = indexService.getShard(shardId.id()); if (indexShard.routingEntry().isPromotableToPrimary() == false) { - // TODO: Re-evaluate assertion (ES-8227) - // assert indexShard.indexSettings().isFastRefresh() == false - // : "a search shard should not receive a TransportShardMultiGetAction for an index with fast refresh"; handleMultiGetOnUnpromotableShard(request, indexShard, listener); return; } + // TODO: adapt assertion to assert only that it is not stateless (ES-9563) assert DiscoveryNode.isStateless(clusterService.getSettings()) == false || indexShard.indexSettings().isFastRefresh() : "in Stateless a promotable to primary shard can receive a TransportShardMultiGetAction only if an index has " + "the fast refresh setting"; diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/PostWriteRefresh.java b/server/src/main/java/org/elasticsearch/action/support/replication/PostWriteRefresh.java index 683c3589c893..7414aeeb2c40 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/PostWriteRefresh.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/PostWriteRefresh.java @@ -19,7 +19,6 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.translog.Translog; @@ -53,9 +52,7 @@ 
public void refreshShard( case WAIT_UNTIL -> waitUntil(indexShard, location, new ActionListener<>() { @Override public void onResponse(Boolean forced) { - // Fast refresh indices do not depend on the unpromotables being refreshed - boolean fastRefresh = IndexSettings.INDEX_FAST_REFRESH_SETTING.get(indexShard.indexSettings().getSettings()); - if (location != null && (indexShard.routingEntry().isSearchable() == false && fastRefresh == false)) { + if (location != null && indexShard.routingEntry().isSearchable() == false) { refreshUnpromotables(indexShard, location, listener, forced, postWriteRefreshTimeout); } else { listener.onResponse(forced); @@ -68,9 +65,7 @@ public void onFailure(Exception e) { } }); case IMMEDIATE -> immediate(indexShard, listener.delegateFailureAndWrap((l, r) -> { - // Fast refresh indices do not depend on the unpromotables being refreshed - boolean fastRefresh = IndexSettings.INDEX_FAST_REFRESH_SETTING.get(indexShard.indexSettings().getSettings()); - if (indexShard.getReplicationGroup().getRoutingTable().unpromotableShards().size() > 0 && fastRefresh == false) { + if (indexShard.getReplicationGroup().getRoutingTable().unpromotableShards().size() > 0) { sendUnpromotableRequests(indexShard, r.generation(), true, l, postWriteRefreshTimeout); } else { l.onResponse(true); diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/CoordinationState.java b/server/src/main/java/org/elasticsearch/cluster/coordination/CoordinationState.java index 6841c6e49e3f..62d3f5e5866b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/CoordinationState.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/CoordinationState.java @@ -467,7 +467,7 @@ public void handleCommit(ApplyCommitRequest applyCommit) { logger.debug( "handleCommit: ignored commit request due to term mismatch " + "(expected: [term {} version {}], actual: [term {} version {}])", - getLastAcceptedTerm(), + getCurrentTerm(), getLastAcceptedVersion(), applyCommit.getTerm(), applyCommit.getVersion() diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/RepositoryMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/RepositoryMetadata.java index 9b3abf38c519..0b9c359006b2 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/RepositoryMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/RepositoryMetadata.java @@ -46,7 +46,11 @@ public class RepositoryMetadata implements Writeable { * @param settings repository settings */ public RepositoryMetadata(String name, String type, Settings settings) { - this(name, RepositoryData.MISSING_UUID, type, settings, RepositoryData.UNKNOWN_REPO_GEN, RepositoryData.EMPTY_REPO_GEN); + this(name, RepositoryData.MISSING_UUID, type, settings); + } + + public RepositoryMetadata(String name, String uuid, String type, Settings settings) { + this(name, uuid, type, settings, RepositoryData.UNKNOWN_REPO_GEN, RepositoryData.EMPTY_REPO_GEN); } public RepositoryMetadata(RepositoryMetadata metadata, long generation, long pendingGeneration) { diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/IndexRouting.java b/server/src/main/java/org/elasticsearch/cluster/routing/IndexRouting.java index aa92f395b20d..be0e3429a2ce 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/IndexRouting.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/IndexRouting.java @@ -167,8 +167,7 @@ public void process(IndexRequest indexRequest) { // generate id if not 
already provided final String id = indexRequest.id(); if (id == null) { - if (creationVersion.between(IndexVersions.TIME_BASED_K_ORDERED_DOC_ID_BACKPORT, IndexVersions.UPGRADE_TO_LUCENE_10_0_0) - || creationVersion.onOrAfter(IndexVersions.TIME_BASED_K_ORDERED_DOC_ID) && indexMode == IndexMode.LOGSDB) { + if (shouldUseTimeBasedId(indexMode, creationVersion)) { indexRequest.autoGenerateTimeBasedId(); } else { indexRequest.autoGenerateId(); @@ -178,6 +177,15 @@ public void process(IndexRequest indexRequest) { } } + private static boolean shouldUseTimeBasedId(final IndexMode indexMode, final IndexVersion creationVersion) { + return indexMode == IndexMode.LOGSDB && isNewIndexVersion(creationVersion); + } + + private static boolean isNewIndexVersion(final IndexVersion creationVersion) { + return creationVersion.between(IndexVersions.TIME_BASED_K_ORDERED_DOC_ID_BACKPORT, IndexVersions.UPGRADE_TO_LUCENE_10_0_0) + || creationVersion.onOrAfter(IndexVersions.TIME_BASED_K_ORDERED_DOC_ID); + } + @Override public int indexShard( String id, diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java b/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java index f7812d284f2a..13fc874f52e9 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java @@ -32,6 +32,7 @@ import java.util.Set; import java.util.stream.Collectors; +import static org.elasticsearch.TransportVersions.FAST_REFRESH_RCO_2; import static org.elasticsearch.index.IndexSettings.INDEX_FAST_REFRESH_SETTING; public class OperationRouting { @@ -305,8 +306,14 @@ public ShardId shardId(ClusterState clusterState, String index, String id, @Null } public static boolean canSearchShard(ShardRouting shardRouting, ClusterState clusterState) { + // TODO: remove if and always return isSearchable (ES-9563) if (INDEX_FAST_REFRESH_SETTING.get(clusterState.metadata().index(shardRouting.index()).getSettings())) { - return shardRouting.isPromotableToPrimary(); + // Until all the cluster is upgraded, we send searches/gets to the primary (even if it has been upgraded) to execute locally. + if (clusterState.getMinTransportVersion().onOrAfter(FAST_REFRESH_RCO_2)) { + return shardRouting.isSearchable(); + } else { + return shardRouting.isPromotableToPrimary(); + } } else { return shardRouting.isSearchable(); } diff --git a/server/src/main/java/org/elasticsearch/cluster/service/TransportFeatures.java b/server/src/main/java/org/elasticsearch/cluster/service/TransportFeatures.java deleted file mode 100644 index 6e0a8afd6cf8..000000000000 --- a/server/src/main/java/org/elasticsearch/cluster/service/TransportFeatures.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ - -package org.elasticsearch.cluster.service; - -import org.elasticsearch.Version; -import org.elasticsearch.features.FeatureSpecification; -import org.elasticsearch.features.NodeFeature; - -import java.util.Map; - -public class TransportFeatures implements FeatureSpecification { - @Override - public Map getHistoricalFeatures() { - // transport version was introduced in 8.8.0, but we need to wait until all nodes are >8.8.0 - // to properly detect when we need to fix transport versions - return Map.of(TransportVersionsFixupListener.FIX_TRANSPORT_VERSION, Version.V_8_8_1); - } -} diff --git a/server/src/main/java/org/elasticsearch/cluster/service/TransportVersionsFixupListener.java b/server/src/main/java/org/elasticsearch/cluster/service/TransportVersionsFixupListener.java deleted file mode 100644 index 0ae0f8b10aed..000000000000 --- a/server/src/main/java/org/elasticsearch/cluster/service/TransportVersionsFixupListener.java +++ /dev/null @@ -1,229 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.cluster.service; - -import org.elasticsearch.TransportVersion; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.FailedNodeException; -import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; -import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; -import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; -import org.elasticsearch.client.internal.ClusterAdminClient; -import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.ClusterStateListener; -import org.elasticsearch.cluster.ClusterStateTaskExecutor; -import org.elasticsearch.cluster.ClusterStateTaskListener; -import org.elasticsearch.cluster.version.CompatibilityVersions; -import org.elasticsearch.common.Priority; -import org.elasticsearch.common.util.set.Sets; -import org.elasticsearch.core.SuppressForbidden; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.core.UpdateForV9; -import org.elasticsearch.features.FeatureService; -import org.elasticsearch.features.NodeFeature; -import org.elasticsearch.logging.LogManager; -import org.elasticsearch.logging.Logger; -import org.elasticsearch.threadpool.Scheduler; -import org.elasticsearch.threadpool.ThreadPool; - -import java.util.Collections; -import java.util.HashSet; -import java.util.Map; -import java.util.Objects; -import java.util.Optional; -import java.util.Set; -import java.util.concurrent.Executor; -import java.util.stream.Collectors; - -import static org.elasticsearch.cluster.ClusterState.INFERRED_TRANSPORT_VERSION; - -/** - * This fixes up the transport version from pre-8.8.0 cluster state that was inferred as the minimum possible, - * due to the master node not understanding cluster state with transport versions added in 8.8.0. 
- * Any nodes with the inferred placeholder cluster state is then refreshed with their actual transport version - */ -@UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // this can be removed in v9 -public class TransportVersionsFixupListener implements ClusterStateListener { - - private static final Logger logger = LogManager.getLogger(TransportVersionsFixupListener.class); - - static final NodeFeature FIX_TRANSPORT_VERSION = new NodeFeature("transport.fix_transport_version"); - - private static final TimeValue RETRY_TIME = TimeValue.timeValueSeconds(30); - - private final MasterServiceTaskQueue taskQueue; - private final ClusterAdminClient client; - private final Scheduler scheduler; - private final Executor executor; - private final Set pendingNodes = Collections.synchronizedSet(new HashSet<>()); - private final FeatureService featureService; - - public TransportVersionsFixupListener( - ClusterService service, - ClusterAdminClient client, - FeatureService featureService, - ThreadPool threadPool - ) { - // there tends to be a lot of state operations on an upgrade - this one is not time-critical, - // so use LOW priority. It just needs to be run at some point after upgrade. - this( - service.createTaskQueue("fixup-transport-versions", Priority.LOW, new TransportVersionUpdater()), - client, - featureService, - threadPool, - threadPool.executor(ThreadPool.Names.CLUSTER_COORDINATION) - ); - } - - TransportVersionsFixupListener( - MasterServiceTaskQueue taskQueue, - ClusterAdminClient client, - FeatureService featureService, - Scheduler scheduler, - Executor executor - ) { - this.taskQueue = taskQueue; - this.client = client; - this.featureService = featureService; - this.scheduler = scheduler; - this.executor = executor; - } - - class NodeTransportVersionTask implements ClusterStateTaskListener { - private final Map results; - private final int retryNum; - - NodeTransportVersionTask(Map results, int retryNum) { - this.results = results; - this.retryNum = retryNum; - } - - @Override - public void onFailure(Exception e) { - logger.error("Could not apply transport version for nodes {} to cluster state", results.keySet(), e); - scheduleRetry(results.keySet(), retryNum); - } - - public Map results() { - return results; - } - } - - private static class TransportVersionUpdater implements ClusterStateTaskExecutor { - @Override - public ClusterState execute(BatchExecutionContext context) throws Exception { - ClusterState.Builder builder = ClusterState.builder(context.initialState()); - boolean modified = false; - for (var c : context.taskContexts()) { - for (var e : c.getTask().results().entrySet()) { - // this node's transport version might have been updated already/node has gone away - var cvMap = builder.compatibilityVersions(); - TransportVersion recordedTv = Optional.ofNullable(cvMap.get(e.getKey())) - .map(CompatibilityVersions::transportVersion) - .orElse(null); - assert (recordedTv != null) || (context.initialState().nodes().nodeExists(e.getKey()) == false) - : "Node " + e.getKey() + " is in the cluster but does not have an associated transport version recorded"; - if (Objects.equals(recordedTv, INFERRED_TRANSPORT_VERSION)) { - builder.putCompatibilityVersions(e.getKey(), e.getValue(), Map.of()); // unknown mappings versions - modified = true; - } - } - c.success(() -> {}); - } - return modified ? 
builder.build() : context.initialState(); - } - } - - @SuppressForbidden(reason = "maintaining ClusterState#compatibilityVersions requires reading them") - private static Map getCompatibilityVersions(ClusterState clusterState) { - return clusterState.compatibilityVersions(); - } - - @Override - public void clusterChanged(ClusterChangedEvent event) { - if (event.localNodeMaster() == false) return; // only if we're master - - // if the min node version > 8.8.0, and the cluster state has some transport versions == 8.8.0, - // then refresh all inferred transport versions to their real versions - // now that everything should understand cluster state with transport versions - if (featureService.clusterHasFeature(event.state(), FIX_TRANSPORT_VERSION) - && event.state().getMinTransportVersion().equals(INFERRED_TRANSPORT_VERSION)) { - - // find all the relevant nodes - Set nodes = getCompatibilityVersions(event.state()).entrySet() - .stream() - .filter(e -> e.getValue().transportVersion().equals(INFERRED_TRANSPORT_VERSION)) - .map(Map.Entry::getKey) - .collect(Collectors.toSet()); - - updateTransportVersions(nodes, 0); - } - } - - private void scheduleRetry(Set nodes, int thisRetryNum) { - // just keep retrying until this succeeds - logger.debug("Scheduling retry {} for nodes {}", thisRetryNum + 1, nodes); - scheduler.schedule(() -> updateTransportVersions(nodes, thisRetryNum + 1), RETRY_TIME, executor); - } - - private void updateTransportVersions(Set nodes, int retryNum) { - // some might already be in-progress - Set outstandingNodes = Sets.newHashSetWithExpectedSize(nodes.size()); - synchronized (pendingNodes) { - for (String n : nodes) { - if (pendingNodes.add(n)) { - outstandingNodes.add(n); - } - } - } - if (outstandingNodes.isEmpty()) { - // all nodes already have in-progress requests - return; - } - - NodesInfoRequest request = new NodesInfoRequest(outstandingNodes.toArray(String[]::new)); - request.clear(); // only requesting base data - client.nodesInfo(request, new ActionListener<>() { - @Override - public void onResponse(NodesInfoResponse response) { - pendingNodes.removeAll(outstandingNodes); - handleResponse(response, retryNum); - } - - @Override - public void onFailure(Exception e) { - pendingNodes.removeAll(outstandingNodes); - logger.warn("Could not read transport versions for nodes {}", outstandingNodes, e); - scheduleRetry(outstandingNodes, retryNum); - } - }); - } - - private void handleResponse(NodesInfoResponse response, int retryNum) { - if (response.hasFailures()) { - Set failedNodes = new HashSet<>(); - for (FailedNodeException fne : response.failures()) { - logger.warn("Failed to read transport version info from node {}", fne.nodeId(), fne); - failedNodes.add(fne.nodeId()); - } - scheduleRetry(failedNodes, retryNum); - } - // carry on and read what we can - - Map results = response.getNodes() - .stream() - .collect(Collectors.toUnmodifiableMap(n -> n.getNode().getId(), NodeInfo::getTransportVersion)); - - if (results.isEmpty() == false) { - taskQueue.submitTask("update-transport-version", new NodeTransportVersionTask(results, retryNum), null); - } - } -} diff --git a/server/src/main/java/org/elasticsearch/common/ReferenceDocs.java b/server/src/main/java/org/elasticsearch/common/ReferenceDocs.java index 926056fec3ec..c0fe0bc32fb0 100644 --- a/server/src/main/java/org/elasticsearch/common/ReferenceDocs.java +++ b/server/src/main/java/org/elasticsearch/common/ReferenceDocs.java @@ -82,6 +82,7 @@ public enum ReferenceDocs { CIRCUIT_BREAKER_ERRORS, 
ALLOCATION_EXPLAIN_NO_COPIES, ALLOCATION_EXPLAIN_MAX_RETRY, + SECURE_SETTINGS, // this comment keeps the ';' on the next line so every entry above has a trailing ',' which makes the diff for adding new links cleaner ; diff --git a/server/src/main/java/org/elasticsearch/common/settings/SecureSetting.java b/server/src/main/java/org/elasticsearch/common/settings/SecureSetting.java index 36ca2df08724..3d4f0d2d9dbf 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/SecureSetting.java +++ b/server/src/main/java/org/elasticsearch/common/settings/SecureSetting.java @@ -185,9 +185,7 @@ private InsecureStringSetting(String name) { @Override public SecureString get(Settings settings) { if (ALLOW_INSECURE_SETTINGS == false && exists(settings)) { - throw new IllegalArgumentException( - "Setting [" + name + "] is insecure, " + "but property [allow_insecure_settings] is not set" - ); + throw new IllegalArgumentException("Setting [" + name + "] is insecure, use the elasticsearch keystore instead"); } return super.get(settings); } diff --git a/server/src/main/java/org/elasticsearch/health/HealthFeatures.java b/server/src/main/java/org/elasticsearch/health/HealthFeatures.java index 6d106199610d..091dbc0eae74 100644 --- a/server/src/main/java/org/elasticsearch/health/HealthFeatures.java +++ b/server/src/main/java/org/elasticsearch/health/HealthFeatures.java @@ -9,34 +9,17 @@ package org.elasticsearch.health; -import org.elasticsearch.Version; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; -import java.util.Map; import java.util.Set; public class HealthFeatures implements FeatureSpecification { - public static final NodeFeature SUPPORTS_HEALTH = new NodeFeature("health.supports_health"); - public static final NodeFeature SUPPORTS_HEALTH_REPORT_API = new NodeFeature("health.supports_health_report_api"); - public static final NodeFeature SUPPORTS_SHARDS_CAPACITY_INDICATOR = new NodeFeature("health.shards_capacity_indicator"); public static final NodeFeature SUPPORTS_EXTENDED_REPOSITORY_INDICATOR = new NodeFeature("health.extended_repository_indicator"); @Override public Set getFeatures() { return Set.of(SUPPORTS_EXTENDED_REPOSITORY_INDICATOR); } - - @Override - public Map getHistoricalFeatures() { - return Map.of( - SUPPORTS_HEALTH, - Version.V_8_5_0, // health accessible via /_internal/_health - SUPPORTS_HEALTH_REPORT_API, - Version.V_8_7_0, // health accessible via /_health_report - SUPPORTS_SHARDS_CAPACITY_INDICATOR, - Version.V_8_8_0 - ); - } } diff --git a/server/src/main/java/org/elasticsearch/health/metadata/HealthMetadataService.java b/server/src/main/java/org/elasticsearch/health/metadata/HealthMetadataService.java index 44fc65fab534..0d30e157a3a0 100644 --- a/server/src/main/java/org/elasticsearch/health/metadata/HealthMetadataService.java +++ b/server/src/main/java/org/elasticsearch/health/metadata/HealthMetadataService.java @@ -28,7 +28,6 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.features.FeatureService; import org.elasticsearch.gateway.GatewayService; -import org.elasticsearch.health.HealthFeatures; import java.util.List; import java.util.stream.Stream; @@ -137,7 +136,7 @@ private void updateOnHealthNodeEnabledChange(boolean enabled) { private boolean canPostClusterStateUpdates(ClusterState state) { // Wait until every node in the cluster supports health checks - return isMaster && state.clusterRecovered() && featureService.clusterHasFeature(state, HealthFeatures.SUPPORTS_HEALTH); + return isMaster && 
state.clusterRecovered(); } private void updateOnClusterStateChange(ClusterChangedEvent event) { diff --git a/server/src/main/java/org/elasticsearch/health/node/DiskHealthIndicatorService.java b/server/src/main/java/org/elasticsearch/health/node/DiskHealthIndicatorService.java index e38ce7ac92a0..c975e1d1abd9 100644 --- a/server/src/main/java/org/elasticsearch/health/node/DiskHealthIndicatorService.java +++ b/server/src/main/java/org/elasticsearch/health/node/DiskHealthIndicatorService.java @@ -20,7 +20,6 @@ import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.features.FeatureService; import org.elasticsearch.health.Diagnosis; -import org.elasticsearch.health.HealthFeatures; import org.elasticsearch.health.HealthIndicatorDetails; import org.elasticsearch.health.HealthIndicatorImpact; import org.elasticsearch.health.HealthIndicatorResult; @@ -91,15 +90,6 @@ public HealthIndicatorResult calculate(boolean verbose, int maxAffectedResources ClusterState clusterState = clusterService.state(); Map diskHealthInfoMap = healthInfo.diskInfoByNode(); if (diskHealthInfoMap == null || diskHealthInfoMap.isEmpty()) { - if (featureService.clusterHasFeature(clusterState, HealthFeatures.SUPPORTS_HEALTH) == false) { - return createIndicator( - HealthStatus.GREEN, - "No disk usage data available. The cluster currently has mixed versions (an upgrade may be in progress).", - HealthIndicatorDetails.EMPTY, - List.of(), - List.of() - ); - } /* * If there is no disk health info, that either means that a new health node was just elected, or something is seriously * wrong with health data collection on the health node. Either way, we immediately return UNKNOWN. If there are at least diff --git a/server/src/main/java/org/elasticsearch/health/node/LocalHealthMonitor.java b/server/src/main/java/org/elasticsearch/health/node/LocalHealthMonitor.java index a08de9abb4ae..aab9e972cba7 100644 --- a/server/src/main/java/org/elasticsearch/health/node/LocalHealthMonitor.java +++ b/server/src/main/java/org/elasticsearch/health/node/LocalHealthMonitor.java @@ -25,7 +25,6 @@ import org.elasticsearch.common.util.concurrent.RunOnce; import org.elasticsearch.core.TimeValue; import org.elasticsearch.features.FeatureService; -import org.elasticsearch.health.HealthFeatures; import org.elasticsearch.health.metadata.HealthMetadata; import org.elasticsearch.health.node.action.HealthNodeNotDiscoveredException; import org.elasticsearch.health.node.selection.HealthNode; @@ -200,7 +199,6 @@ public void clusterChanged(ClusterChangedEvent event) { } } prerequisitesFulfilled = event.state().clusterRecovered() - && featureService.clusterHasFeature(event.state(), HealthFeatures.SUPPORTS_HEALTH) && HealthMetadata.getFromClusterState(event.state()) != null && currentHealthNode != null && currentMasterNode != null; diff --git a/server/src/main/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorService.java b/server/src/main/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorService.java index b02bbd95bb9a..4dd94cfc046c 100644 --- a/server/src/main/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorService.java +++ b/server/src/main/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorService.java @@ -16,7 +16,6 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.features.FeatureService; import org.elasticsearch.health.Diagnosis; -import org.elasticsearch.health.HealthFeatures; import org.elasticsearch.health.HealthIndicatorDetails; import 
org.elasticsearch.health.HealthIndicatorImpact; import org.elasticsearch.health.HealthIndicatorResult; @@ -111,15 +110,6 @@ public HealthIndicatorResult calculate(boolean verbose, int maxAffectedResources var state = clusterService.state(); var healthMetadata = HealthMetadata.getFromClusterState(state); if (healthMetadata == null || healthMetadata.getShardLimitsMetadata() == null) { - if (featureService.clusterHasFeature(state, HealthFeatures.SUPPORTS_SHARDS_CAPACITY_INDICATOR) == false) { - return createIndicator( - HealthStatus.GREEN, - "No shard limits configured yet. The cluster currently has mixed versions (an upgrade may be in progress).", - HealthIndicatorDetails.EMPTY, - List.of(), - List.of() - ); - } return unknownIndicator(); } diff --git a/server/src/main/java/org/elasticsearch/health/node/selection/HealthNodeTaskExecutor.java b/server/src/main/java/org/elasticsearch/health/node/selection/HealthNodeTaskExecutor.java index 3357936e5f10..3efad1aee26b 100644 --- a/server/src/main/java/org/elasticsearch/health/node/selection/HealthNodeTaskExecutor.java +++ b/server/src/main/java/org/elasticsearch/health/node/selection/HealthNodeTaskExecutor.java @@ -23,7 +23,6 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.features.FeatureService; -import org.elasticsearch.health.HealthFeatures; import org.elasticsearch.node.NodeClosedException; import org.elasticsearch.persistent.AllocatedPersistentTask; import org.elasticsearch.persistent.PersistentTaskParams; @@ -157,11 +156,8 @@ public PersistentTasksCustomMetadata.Assignment getAssignment( // visible for testing void startTask(ClusterChangedEvent event) { - // Wait until every node in the cluster supports health checks - if (event.localNodeMaster() - && event.state().clusterRecovered() - && HealthNode.findTask(event.state()) == null - && featureService.clusterHasFeature(event.state(), HealthFeatures.SUPPORTS_HEALTH)) { + // Wait until master is stable before starting health task + if (event.localNodeMaster() && event.state().clusterRecovered() && HealthNode.findTask(event.state()) == null) { persistentTasksService.sendStartRequest( TASK_NAME, TASK_NAME, diff --git a/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java b/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java index 2c1175648c21..91c4b780db0b 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java @@ -189,7 +189,11 @@ public Analyzer getAnalyzer(String analyzer) throws IOException { } }); } - return analyzerProvider.get(environment, analyzer).get(); + + return overridePositionIncrementGap( + (NamedAnalyzer) analyzerProvider.get(environment, analyzer).get(), + TextFieldMapper.Defaults.POSITION_INCREMENT_GAP + ); } @Override @@ -720,13 +724,8 @@ private static NamedAnalyzer produceAnalyzer( throw new IllegalArgumentException("analyzer [" + analyzerFactory.name() + "] created null analyzer"); } NamedAnalyzer analyzer; - if (analyzerF instanceof NamedAnalyzer) { - // if we got a named analyzer back, use it... 
- analyzer = (NamedAnalyzer) analyzerF; - if (overridePositionIncrementGap >= 0 && analyzer.getPositionIncrementGap(analyzer.name()) != overridePositionIncrementGap) { - // unless the positionIncrementGap needs to be overridden - analyzer = new NamedAnalyzer(analyzer, overridePositionIncrementGap); - } + if (analyzerF instanceof NamedAnalyzer namedAnalyzer) { + analyzer = overridePositionIncrementGap(namedAnalyzer, overridePositionIncrementGap); } else { analyzer = new NamedAnalyzer(name, analyzerFactory.scope(), analyzerF, overridePositionIncrementGap); } @@ -734,6 +733,13 @@ private static NamedAnalyzer produceAnalyzer( return analyzer; } + private static NamedAnalyzer overridePositionIncrementGap(NamedAnalyzer analyzer, int overridePositionIncrementGap) { + if (overridePositionIncrementGap >= 0 && analyzer.getPositionIncrementGap(analyzer.name()) != overridePositionIncrementGap) { + analyzer = new NamedAnalyzer(analyzer, overridePositionIncrementGap); + } + return analyzer; + } + private static void processNormalizerFactory( String name, AnalyzerProvider normalizerFactory, diff --git a/server/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java b/server/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java index 59607fadc0dd..33a8487bb33a 100644 --- a/server/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java +++ b/server/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java @@ -58,8 +58,6 @@ import java.util.concurrent.ExecutionException; import java.util.concurrent.Executor; -import static org.elasticsearch.index.IndexSettings.INDEX_FAST_REFRESH_SETTING; - /** * This is a cache for {@link BitDocIdSet} based filters and is unbounded by size or time. *

@@ -105,10 +103,7 @@ static boolean shouldLoadRandomAccessFiltersEagerly(IndexSettings settings) { boolean loadFiltersEagerlySetting = settings.getValue(INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING); boolean isStateless = DiscoveryNode.isStateless(settings.getNodeSettings()); if (isStateless) { - return loadFiltersEagerlySetting - && (DiscoveryNode.hasRole(settings.getNodeSettings(), DiscoveryNodeRole.SEARCH_ROLE) - || (DiscoveryNode.hasRole(settings.getNodeSettings(), DiscoveryNodeRole.INDEX_ROLE) - && INDEX_FAST_REFRESH_SETTING.get(settings.getSettings()))); + return loadFiltersEagerlySetting && DiscoveryNode.hasRole(settings.getNodeSettings(), DiscoveryNodeRole.SEARCH_ROLE); } else { return loadFiltersEagerlySetting; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/MultiDenseVectorScriptDocValues.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/MultiDenseVectorScriptDocValues.java new file mode 100644 index 000000000000..a91960832239 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/MultiDenseVectorScriptDocValues.java @@ -0,0 +1,81 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.index.mapper.vectors; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.index.fielddata.ScriptDocValues; +import org.elasticsearch.script.field.vectors.MultiDenseVector; + +import java.util.Iterator; + +public class MultiDenseVectorScriptDocValues extends ScriptDocValues<BytesRef> { + + public static final String MISSING_VECTOR_FIELD_MESSAGE = "A document doesn't have a value for a multi-vector field!"; + + private final int dims; + protected final MultiDenseVectorSupplier dvSupplier; + + public MultiDenseVectorScriptDocValues(MultiDenseVectorSupplier supplier, int dims) { + super(supplier); + this.dvSupplier = supplier; + this.dims = dims; + } + + public int dims() { + return dims; + } + + private MultiDenseVector getCheckedVector() { + MultiDenseVector vector = dvSupplier.getInternal(); + if (vector == null) { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + return vector; + } + + /** + * Get multi-dense vector's value as an array of floats + */ + public Iterator<float[]> getVectorValues() { + return getCheckedVector().getVectors(); + } + + /** + * Get multi-dense vector's magnitudes + */ + public float[] getMagnitudes() { + return getCheckedVector().getMagnitudes(); + } + + @Override + public BytesRef get(int index) { + throw new UnsupportedOperationException( + "accessing a multi-vector field's value through 'get' or 'value' is not supported, use 'vectorValues' or 'magnitudes' instead."
+ ); + } + + @Override + public int size() { + MultiDenseVector mdv = dvSupplier.getInternal(); + if (mdv != null) { + return mdv.size(); + } + return 0; + } + + public interface MultiDenseVectorSupplier extends Supplier<BytesRef> { + @Override + default BytesRef getInternal(int index) { + throw new UnsupportedOperationException(); + } + + MultiDenseVector getInternal(); + } +} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/MultiVectorDVLeafFieldData.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/MultiVectorDVLeafFieldData.java index cc6fb3827445..b9716d315f33 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/MultiVectorDVLeafFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/MultiVectorDVLeafFieldData.java @@ -9,37 +9,44 @@ package org.elasticsearch.index.mapper.vectors; +import org.apache.lucene.index.BinaryDocValues; +import org.apache.lucene.index.DocValues; import org.apache.lucene.index.LeafReader; -import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.fielddata.LeafFieldData; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.script.field.DocValuesScriptFieldFactory; +import org.elasticsearch.script.field.vectors.BitMultiDenseVectorDocValuesField; +import org.elasticsearch.script.field.vectors.ByteMultiDenseVectorDocValuesField; +import org.elasticsearch.script.field.vectors.FloatMultiDenseVectorDocValuesField; + +import java.io.IOException; final class MultiVectorDVLeafFieldData implements LeafFieldData { private final LeafReader reader; private final String field; - private final IndexVersion indexVersion; private final DenseVectorFieldMapper.ElementType elementType; private final int dims; - MultiVectorDVLeafFieldData( - LeafReader reader, - String field, - IndexVersion indexVersion, - DenseVectorFieldMapper.ElementType elementType, - int dims - ) { + MultiVectorDVLeafFieldData(LeafReader reader, String field, DenseVectorFieldMapper.ElementType elementType, int dims) { this.reader = reader; this.field = field; - this.indexVersion = indexVersion; this.elementType = elementType; this.dims = dims; } @Override public DocValuesScriptFieldFactory getScriptFieldFactory(String name) { - // TODO - return null; + try { + BinaryDocValues values = DocValues.getBinary(reader, field); + BinaryDocValues magnitudeValues = DocValues.getBinary(reader, field + MultiDenseVectorFieldMapper.VECTOR_MAGNITUDES_SUFFIX); + return switch (elementType) { + case BYTE -> new ByteMultiDenseVectorDocValuesField(values, magnitudeValues, name, elementType, dims); + case FLOAT -> new FloatMultiDenseVectorDocValuesField(values, magnitudeValues, name, elementType, dims); + case BIT -> new BitMultiDenseVectorDocValuesField(values, magnitudeValues, name, elementType, dims); + }; + } catch (IOException e) { + throw new IllegalStateException("Cannot load doc values for multi-vector field!", e); + } } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/MultiVectorIndexFieldData.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/MultiVectorIndexFieldData.java index 65ef492ce052..44a666e25a61 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/MultiVectorIndexFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/MultiVectorIndexFieldData.java @@ -55,7 +55,7 @@ public ValuesSourceType getValuesSourceType() { @Override public MultiVectorDVLeafFieldData load(LeafReaderContext context) {
- return new MultiVectorDVLeafFieldData(context.reader(), fieldName, indexVersion, elementType, dims); + return new MultiVectorDVLeafFieldData(context.reader(), fieldName, elementType, dims); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/VectorEncoderDecoder.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/VectorEncoderDecoder.java index 9d09a7493d60..3db2d164846b 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/VectorEncoderDecoder.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/VectorEncoderDecoder.java @@ -84,4 +84,24 @@ public static void decodeDenseVector(IndexVersion indexVersion, BytesRef vectorB } } + public static float[] getMultiMagnitudes(BytesRef magnitudes) { + assert magnitudes.length % Float.BYTES == 0; + float[] multiMagnitudes = new float[magnitudes.length / Float.BYTES]; + ByteBuffer byteBuffer = ByteBuffer.wrap(magnitudes.bytes, magnitudes.offset, magnitudes.length).order(ByteOrder.LITTLE_ENDIAN); + for (int i = 0; i < magnitudes.length / Float.BYTES; i++) { + multiMagnitudes[i] = byteBuffer.getFloat(); + } + return multiMagnitudes; + } + + public static void decodeMultiDenseVector(BytesRef vectorBR, int numVectors, float[][] multiVectorValue) { + if (vectorBR == null) { + throw new IllegalArgumentException(MultiDenseVectorScriptDocValues.MISSING_VECTOR_FIELD_MESSAGE); + } + FloatBuffer fb = ByteBuffer.wrap(vectorBR.bytes, vectorBR.offset, vectorBR.length).order(ByteOrder.LITTLE_ENDIAN).asFloatBuffer(); + for (int i = 0; i < numVectors; i++) { + fb.get(multiVectorValue[i]); + } + } + } diff --git a/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java index 626875c75a5f..83bca7d27aee 100644 --- a/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java @@ -112,6 +112,13 @@ public QueryBuilder query() { return query; } + /** + * Returns path to the searched nested object. + */ + public String path() { + return path; + } + /** * Returns inner hit definition in the scope of this query and reusing the defined type and query. 
*/ diff --git a/server/src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotStatus.java b/server/src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotStatus.java index 70ba9950f768..d8bd460f6f81 100644 --- a/server/src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotStatus.java +++ b/server/src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotStatus.java @@ -245,7 +245,11 @@ public ShardSnapshotResult getShardSnapshotResult() { } public void ensureNotAborted() { - switch (stage.get()) { + ensureNotAborted(stage.get()); + } + + public static void ensureNotAborted(Stage shardSnapshotStage) { + switch (shardSnapshotStage) { case ABORTED -> throw new AbortedSnapshotException(); case PAUSING -> throw new PausedSnapshotException(); } diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java b/server/src/main/java/org/elasticsearch/indices/IndicesService.java index 3ac61bbca1a2..27d832241bfe 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -23,7 +23,6 @@ import org.elasticsearch.action.ResolvedIndices; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.mapping.put.TransportAutoPutMappingAction; -import org.elasticsearch.action.admin.indices.mapping.put.TransportPutMappingAction; import org.elasticsearch.action.admin.indices.stats.CommonStats; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags.Flag; @@ -79,7 +78,6 @@ import org.elasticsearch.env.ShardLock; import org.elasticsearch.env.ShardLockObtainFailedException; import org.elasticsearch.features.FeatureService; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.gateway.MetaStateService; import org.elasticsearch.gateway.MetadataStateFormat; import org.elasticsearch.index.CloseUtils; @@ -211,8 +209,6 @@ public class IndicesService extends AbstractLifecycleComponent Setting.Property.NodeScope ); - static final NodeFeature SUPPORTS_AUTO_PUT = new NodeFeature("indices.auto_put_supported"); - /** * The node's settings. */ @@ -910,9 +906,7 @@ public void createShard( .setConcreteIndex(shardRouting.index()) .source(mapping.source().string(), XContentType.JSON); client.execute( - featureService.clusterHasFeature(clusterService.state(), SUPPORTS_AUTO_PUT) - ? 
diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java b/server/src/main/java/org/elasticsearch/indices/IndicesService.java
index 3ac61bbca1a2..27d832241bfe 100644
--- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java
+++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java
@@ -23,7 +23,6 @@
 import org.elasticsearch.action.ResolvedIndices;
 import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
 import org.elasticsearch.action.admin.indices.mapping.put.TransportAutoPutMappingAction;
-import org.elasticsearch.action.admin.indices.mapping.put.TransportPutMappingAction;
 import org.elasticsearch.action.admin.indices.stats.CommonStats;
 import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags;
 import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags.Flag;
@@ -79,7 +78,6 @@
 import org.elasticsearch.env.ShardLock;
 import org.elasticsearch.env.ShardLockObtainFailedException;
 import org.elasticsearch.features.FeatureService;
-import org.elasticsearch.features.NodeFeature;
 import org.elasticsearch.gateway.MetaStateService;
 import org.elasticsearch.gateway.MetadataStateFormat;
 import org.elasticsearch.index.CloseUtils;
@@ -211,8 +209,6 @@ public class IndicesService extends AbstractLifecycleComponent
         Setting.Property.NodeScope
     );
 
-    static final NodeFeature SUPPORTS_AUTO_PUT = new NodeFeature("indices.auto_put_supported");
-
     /**
      * The node's settings.
     */
@@ -910,9 +906,7 @@ public void createShard(
                 .setConcreteIndex(shardRouting.index())
                 .source(mapping.source().string(), XContentType.JSON);
             client.execute(
-                featureService.clusterHasFeature(clusterService.state(), SUPPORTS_AUTO_PUT)
-                    ? TransportAutoPutMappingAction.TYPE
-                    : TransportPutMappingAction.TYPE,
+                TransportAutoPutMappingAction.TYPE,
                 putMappingRequestAcknowledgedRequest.ackTimeout(TimeValue.MAX_VALUE).masterNodeTimeout(TimeValue.MAX_VALUE),
                 new RefCountAwareThreadedActionListener<>(threadPool.generic(), listener.map(ignored -> null))
             );
diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java
index 308f1894b78d..c8d31d2060ca 100644
--- a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java
+++ b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java
@@ -397,6 +397,36 @@ record StartRecoveryRequestToSend(StartRecoveryRequest startRecoveryRequest, Str
                 }
                 indexShard.recoverLocallyUpToGlobalCheckpoint(ActionListener.assertOnce(l));
             })
+            // Peer recovery can consume a lot of disk space, so it's worth cleaning up locally ahead of the attempt. This step
+            // runs only if the previous step succeeded, and it passes through the previous step's result. Failures at this stage
+            // aren't fatal: we can attempt to recover anyway, and then clean up again at the end. See #104473.
+            .andThenApply(startingSeqNo -> {
+                Store.MetadataSnapshot snapshot;
+                try {
+                    snapshot = indexShard.snapshotStoreMetadata();
+                } catch (IOException e) {
+                    // We give up on the contents for any checked exception thrown by snapshotStoreMetadata. We don't want to
+                    // allow those to bubble up and interrupt recovery because the subsequent recovery attempt is expected
+                    // to fix up these problems for us if it completes successfully.
+                    if (e instanceof org.apache.lucene.index.IndexNotFoundException) {
+                        // this is the expected case on first recovery, so don't spam the logs with exceptions
+                        logger.debug(() -> format("no snapshot found for shard %s, treating as empty", indexShard.shardId()));
+                    } else {
+                        logger.warn(() -> format("unable to load snapshot for shard %s, treating as empty", indexShard.shardId()), e);
+                    }
+                    snapshot = Store.MetadataSnapshot.EMPTY;
+                }
+
+                Store store = indexShard.store();
+                store.incRef();
+                try {
+                    logger.debug(() -> format("cleaning up index directory for %s before recovery", indexShard.shardId()));
+                    store.cleanupAndVerify("cleanup before peer recovery", snapshot);
+                } finally {
+                    store.decRef();
+                }
+                return startingSeqNo;
+            })
             // now construct the start-recovery request
             .andThenApply(startingSeqNo -> {
                 assert startingSeqNo == UNASSIGNED_SEQ_NO || recoveryTarget.state().getStage() == RecoveryState.Stage.TRANSLOG
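The cleanup step above touches the shard's Lucene directory, so it holds a store reference for the duration of cleanupAndVerify. The incRef/decRef discipline it follows is generic; a minimal sketch, assuming only the org.elasticsearch.core.RefCounted interface (the helper name is hypothetical):

import org.elasticsearch.core.RefCounted;

final class RefCountedUsage {
    // Hypothetical helper: run an action while holding a reference to the resource.
    static void withRef(RefCounted resource, Runnable action) {
        resource.incRef(); // throws if the resource has already been fully released
        try {
            action.run();
        } finally {
            resource.decRef(); // always released, even if the action throws
        }
    }
}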
diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java
index e8b9d18a1dd0..62f923d673dc 100644
--- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java
+++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java
@@ -62,7 +62,6 @@
 import org.elasticsearch.cluster.routing.allocation.DiskThresholdMonitor;
 import org.elasticsearch.cluster.routing.allocation.WriteLoadForecaster;
 import org.elasticsearch.cluster.service.ClusterService;
-import org.elasticsearch.cluster.service.TransportVersionsFixupListener;
 import org.elasticsearch.cluster.version.CompatibilityVersions;
 import org.elasticsearch.common.breaker.CircuitBreaker;
 import org.elasticsearch.common.component.LifecycleComponent;
@@ -788,9 +787,6 @@ private void construct(
 
         if (DiscoveryNode.isMasterNode(settings)) {
             clusterService.addListener(new SystemIndexMappingUpdateService(systemIndices, client));
-            clusterService.addListener(
-                new TransportVersionsFixupListener(clusterService, client.admin().cluster(), featureService, threadPool)
-            );
             clusterService.addListener(new NodeFeaturesFixupListener(clusterService, client.admin().cluster(), threadPool));
         }
diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java b/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java
index 2b95fbc69199..1c4b7cfdab4e 100644
--- a/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java
+++ b/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java
@@ -283,12 +283,22 @@ public RegisterRepositoryTask(final RepositoriesService repositoriesService, fin
 
         @Override
         public ClusterState execute(ClusterState currentState) {
-            RepositoryMetadata newRepositoryMetadata = new RepositoryMetadata(request.name(), request.type(), request.settings());
             Metadata.Builder mdBuilder = Metadata.builder(currentState.metadata());
             RepositoriesMetadata repositories = RepositoriesMetadata.get(currentState);
             List<RepositoryMetadata> repositoriesMetadata = new ArrayList<>(repositories.repositories().size() + 1);
             for (RepositoryMetadata repositoryMetadata : repositories.repositories()) {
-                if (repositoryMetadata.name().equals(newRepositoryMetadata.name())) {
+                if (repositoryMetadata.name().equals(request.name())) {
+                    final RepositoryMetadata newRepositoryMetadata = new RepositoryMetadata(
+                        request.name(),
+                        // Copy the UUID from the existing instance rather than resetting it back to MISSING_UUID which would force us to
+                        // re-read the RepositoryData to get it again. In principle the new RepositoryMetadata might point to a different
+                        // underlying repository at this point, but if so that'll cause things to fail in clear ways and eventually (before
+                        // writing anything) we'll read the RepositoryData again and update the UUID in the RepositoryMetadata to match. See
+                        // also #109936.
+                        repositoryMetadata.uuid(),
+                        request.type(),
+                        request.settings()
+                    );
                     Repository existing = repositoriesService.repositories.get(request.name());
                     if (existing == null) {
                         existing = repositoriesService.internalRepositories.get(request.name());
diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java
index b43fe05a541f..f1c3d82b74ca 100644
--- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java
+++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java
@@ -191,6 +191,11 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent implements Repository {
 
     private static final Logger logger = LogManager.getLogger(BlobStoreRepository.class);
 
+    private class ShutdownLogger {
+        // Creating a separate logger so that the log-level can be manipulated separately from the parent class.
+        private static final Logger shutdownLogger = LogManager.getLogger(ShutdownLogger.class);
+    }
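+    // log4j2 derives logger names from the canonical class name, so shutdownLogger above is named
+    // "org.elasticsearch.repositories.blobstore.BlobStoreRepository.ShutdownLogger"; its level can be raised
+    // (for example via the dynamic logger.* cluster settings) without changing BlobStoreRepository's own level.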
+
     protected volatile RepositoryMetadata metadata;
 
     protected final ThreadPool threadPool;
 
@@ -200,6 +205,8 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
     public static final String STATELESS_SHARD_WRITE_THREAD_NAME = "stateless_shard_write";
     public static final String STATELESS_CLUSTER_STATE_READ_WRITE_THREAD_NAME = "stateless_cluster_state";
     public static final String STATELESS_SHARD_PREWARMING_THREAD_NAME = "stateless_prewarm";
+    public static final String SEARCHABLE_SNAPSHOTS_CACHE_FETCH_ASYNC_THREAD_NAME = "searchable_snapshots_cache_fetch_async";
+    public static final String SEARCHABLE_SNAPSHOTS_CACHE_PREWARMING_THREAD_NAME = "searchable_snapshots_cache_prewarming";
 
     /**
      * Prefix for the name of the root {@link RepositoryData} blob.
@@ -2183,7 +2190,9 @@ private void assertSnapshotOrStatelessPermittedThreadPool() {
             STATELESS_TRANSLOG_THREAD_NAME,
             STATELESS_SHARD_WRITE_THREAD_NAME,
             STATELESS_CLUSTER_STATE_READ_WRITE_THREAD_NAME,
-            STATELESS_SHARD_PREWARMING_THREAD_NAME
+            STATELESS_SHARD_PREWARMING_THREAD_NAME,
+            SEARCHABLE_SNAPSHOTS_CACHE_FETCH_ASYNC_THREAD_NAME,
+            SEARCHABLE_SNAPSHOTS_CACHE_PREWARMING_THREAD_NAME
         );
     }
 
@@ -3467,10 +3476,37 @@ private void doSnapshotShard(SnapshotShardContext context) {
         }
     }
 
     private static void ensureNotAborted(ShardId shardId, SnapshotId snapshotId, IndexShardSnapshotStatus snapshotStatus, String fileName) {
+        var shardSnapshotStage = snapshotStatus.getStage();
         try {
-            snapshotStatus.ensureNotAborted();
+            IndexShardSnapshotStatus.ensureNotAborted(shardSnapshotStage);
+
+            if (shardSnapshotStage != IndexShardSnapshotStatus.Stage.INIT && shardSnapshotStage != IndexShardSnapshotStatus.Stage.STARTED) {
+                // A normally running shard snapshot should be in stage INIT or STARTED. And we know it's not in PAUSING or ABORTED
+                // because the ensureNotAborted() call above did not throw. The remaining stages make no sense here, if they can
+                // occur at all.
+                logger.error(
+                    () -> Strings.format(
+                        "Shard snapshot found an unexpected state. ShardId [%s], SnapshotID [%s], Stage [%s]",
+                        shardId,
+                        snapshotId,
+                        shardSnapshotStage
+                    )
+                );
+                assert false;
+            }
         } catch (Exception e) {
-            logger.debug("[{}] [{}] {} on the file [{}], exiting", shardId, snapshotId, e.getMessage(), fileName);
+            // We want to see when a shard snapshot operation checks for and finds an interrupt signal during shutdown. A
+            // PausedSnapshotException indicates we're in shutdown because that's the only case when shard snapshots are signaled
+            // to pause. An AbortedSnapshotException may also occur during shutdown if an uncommon error occurs.
+            ShutdownLogger.shutdownLogger.debug(
+                () -> Strings.format(
+                    "Shard snapshot operation is aborting. ShardId [%s], SnapshotID [%s], File [%s], Stage [%s]",
+                    shardId,
+                    snapshotId,
+                    fileName,
+                    shardSnapshotStage
+                ),
+                e
+            );
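Note the placeholder conventions in the logging above: Strings.format is printf-style (it delegates to String.format), while {} placeholders belong to log4j's parameterized messages; mixing the two leaves literal braces in the output. An illustrative contrast, with blobName standing in for any value:

logger.debug("deleting blob [{}]", blobName);                       // log4j parameterized message
logger.debug(() -> Strings.format("deleting blob [%s]", blobName)); // printf-style, evaluated lazily via a Supplier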
             assert e instanceof AbortedSnapshotException || e instanceof PausedSnapshotException : e;
             throw e;
         }
diff --git a/server/src/main/java/org/elasticsearch/rest/RestFeatures.java b/server/src/main/java/org/elasticsearch/rest/RestFeatures.java
index 8d546f7aa43f..e72b30526c8e 100644
--- a/server/src/main/java/org/elasticsearch/rest/RestFeatures.java
+++ b/server/src/main/java/org/elasticsearch/rest/RestFeatures.java
@@ -9,13 +9,10 @@
 
 package org.elasticsearch.rest;
 
-import org.elasticsearch.Version;
 import org.elasticsearch.features.FeatureSpecification;
 import org.elasticsearch.features.NodeFeature;
-import org.elasticsearch.rest.action.admin.cluster.RestClusterGetSettingsAction;
 import org.elasticsearch.rest.action.admin.cluster.RestNodesCapabilitiesAction;
 
-import java.util.Map;
 import java.util.Set;
 
 import static org.elasticsearch.search.fetch.subphase.highlight.DefaultHighlighter.UNIFIED_HIGHLIGHTER_MATCHED_FIELDS;
@@ -29,9 +26,4 @@ public Set<NodeFeature> getFeatures() {
             UNIFIED_HIGHLIGHTER_MATCHED_FIELDS
         );
     }
-
-    @Override
-    public Map<NodeFeature, Version> getHistoricalFeatures() {
-        return Map.of(RestClusterGetSettingsAction.SUPPORTS_GET_SETTINGS_ACTION, Version.V_8_3_0);
-    }
 }
diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterGetSettingsAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterGetSettingsAction.java
index 946931e16636..ca9e4abcaeec 100644
--- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterGetSettingsAction.java
+++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterGetSettingsAction.java
@@ -11,13 +11,11 @@
 
 import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsAction;
 import org.elasticsearch.action.admin.cluster.settings.RestClusterGetSettingsResponse;
-import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest;
 import org.elasticsearch.action.support.master.MasterNodeReadRequest;
 import org.elasticsearch.client.internal.node.NodeClient;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.settings.SettingsFilter;
-import org.elasticsearch.features.NodeFeature;
 import org.elasticsearch.rest.BaseRestHandler;
 import org.elasticsearch.rest.RestRequest;
 import org.elasticsearch.rest.Scope;
@@ -27,7 +25,6 @@
 import java.io.IOException;
 import java.util.List;
 import java.util.Set;
-import java.util.function.Predicate;
 
 import static org.elasticsearch.rest.RestRequest.Method.GET;
 import static org.elasticsearch.rest.RestUtils.getMasterNodeTimeout;
@@ -35,23 +32,14 @@
 @ServerlessScope(Scope.INTERNAL)
 public class RestClusterGetSettingsAction extends BaseRestHandler {
 
-    public static final NodeFeature SUPPORTS_GET_SETTINGS_ACTION = new NodeFeature("rest.get_settings_action");
-
     private final Settings settings;
     private final ClusterSettings clusterSettings;
     private final SettingsFilter settingsFilter;
-    private final Predicate<NodeFeature> clusterSupportsFeature;
 
-    public RestClusterGetSettingsAction(
-        Settings settings,
-        ClusterSettings clusterSettings,
-        SettingsFilter settingsFilter,
-        Predicate<NodeFeature> clusterSupportsFeature
-    ) {
+    public RestClusterGetSettingsAction(Settings settings, ClusterSettings clusterSettings, SettingsFilter settingsFilter) {
         this.settings = settings;
         this.clusterSettings = clusterSettings;
         this.settingsFilter = settingsFilter;
-        this.clusterSupportsFeature = clusterSupportsFeature;
     }
 
     @Override
@@ -72,10 +60,6 @@ private static void setUpRequestParams(MasterNodeReadRequest<?> clusterRequest,
     public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
         final boolean renderDefaults = request.paramAsBoolean("include_defaults", false);
 
-        if (clusterSupportsFeature.test(SUPPORTS_GET_SETTINGS_ACTION) == false) {
-            return prepareLegacyRequest(request, client, renderDefaults);
-        }
-
         ClusterGetSettingsAction.Request clusterSettingsRequest = new ClusterGetSettingsAction.Request(getMasterNodeTimeout(request));
 
         setUpRequestParams(clusterSettingsRequest, request);
@@ -89,29 +73,6 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC
         );
     }
 
-    private RestChannelConsumer prepareLegacyRequest(final RestRequest request, final NodeClient client, final boolean renderDefaults) {
-        ClusterStateRequest clusterStateRequest = new ClusterStateRequest(getMasterNodeTimeout(request)).routingTable(false).nodes(false);
-        setUpRequestParams(clusterStateRequest, request);
-        return channel -> client.admin()
-            .cluster()
-            .state(
-                clusterStateRequest,
-                new RestToXContentListener<RestClusterGetSettingsResponse>(channel).map(
-                    r -> response(
-                        new ClusterGetSettingsAction.Response(
-                            r.getState().metadata().persistentSettings(),
-                            r.getState().metadata().transientSettings(),
-                            r.getState().metadata().settings()
-                        ),
-                        renderDefaults,
-                        settingsFilter,
-                        clusterSettings,
-                        settings
-                    )
-                )
-            );
-    }
-
     @Override
     protected Set<String> responseParams() {
         return Settings.FORMAT_PARAMS;
diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java b/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java
index 3bc1c467323a..7b57481ad571 100644
--- a/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java
+++ b/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java
@@ -38,6 +38,8 @@ private SearchCapabilities() {}
     private static final String MULTI_DENSE_VECTOR_FIELD_MAPPER = "multi_dense_vector_field_mapper";
     /** Support propagating nested retrievers' inner_hits to top-level compound retrievers. */
     private static final String NESTED_RETRIEVER_INNER_HITS_SUPPORT = "nested_retriever_inner_hits_support";
+    /** Support multi-dense-vector script field access. */
+    private static final String MULTI_DENSE_VECTOR_SCRIPT_ACCESS = "multi_dense_vector_script_access";
 
     public static final Set<String> CAPABILITIES;
     static {
@@ -50,6 +52,7 @@ private SearchCapabilities() {}
         capabilities.add(NESTED_RETRIEVER_INNER_HITS_SUPPORT);
         if (MultiDenseVectorFieldMapper.FEATURE_FLAG.isEnabled()) {
             capabilities.add(MULTI_DENSE_VECTOR_FIELD_MAPPER);
+            capabilities.add(MULTI_DENSE_VECTOR_SCRIPT_ACCESS);
         }
         if (Build.current().isSnapshot()) {
             capabilities.add(KQL_QUERY_SUPPORTED);
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.script.field.vectors; + +import org.apache.lucene.util.BytesRef; + +import java.util.Iterator; + +public class BitMultiDenseVector extends ByteMultiDenseVector { + public BitMultiDenseVector(Iterator vectorValues, BytesRef magnitudesBytes, int numVecs, int dims) { + super(vectorValues, magnitudesBytes, numVecs, dims); + } + + @Override + public void checkDimensions(int qvDims) { + if (qvDims != dims) { + throw new IllegalArgumentException( + "The query vector has a different number of dimensions [" + + qvDims * Byte.SIZE + + "] than the document vectors [" + + dims * Byte.SIZE + + "]." + ); + } + } + + @Override + public int getDims() { + return dims * Byte.SIZE; + } +} diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/BitMultiDenseVectorDocValuesField.java b/server/src/main/java/org/elasticsearch/script/field/vectors/BitMultiDenseVectorDocValuesField.java new file mode 100644 index 000000000000..35a43eabb8f0 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/script/field/vectors/BitMultiDenseVectorDocValuesField.java @@ -0,0 +1,31 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.script.field.vectors; + +import org.apache.lucene.index.BinaryDocValues; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.ElementType; + +public class BitMultiDenseVectorDocValuesField extends ByteMultiDenseVectorDocValuesField { + + public BitMultiDenseVectorDocValuesField( + BinaryDocValues input, + BinaryDocValues magnitudes, + String name, + ElementType elementType, + int dims + ) { + super(input, magnitudes, name, elementType, dims / 8); + } + + @Override + protected MultiDenseVector getVector() { + return new BitMultiDenseVector(vectorValue, magnitudesValue, numVecs, dims); + } +} diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/ByteMultiDenseVector.java b/server/src/main/java/org/elasticsearch/script/field/vectors/ByteMultiDenseVector.java new file mode 100644 index 000000000000..e610d10146b2 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/script/field/vectors/ByteMultiDenseVector.java @@ -0,0 +1,91 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/ByteMultiDenseVector.java b/server/src/main/java/org/elasticsearch/script/field/vectors/ByteMultiDenseVector.java
new file mode 100644
index 000000000000..e610d10146b2
--- /dev/null
+++ b/server/src/main/java/org/elasticsearch/script/field/vectors/ByteMultiDenseVector.java
@@ -0,0 +1,91 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.script.field.vectors;
+
+import org.apache.lucene.util.BytesRef;
+
+import org.elasticsearch.index.mapper.vectors.VectorEncoderDecoder;
+
+import java.util.Iterator;
+
+public class ByteMultiDenseVector implements MultiDenseVector {
+
+    protected final Iterator<byte[]> vectorValues;
+    protected final int numVecs;
+    protected final int dims;
+
+    private Iterator<float[]> floatDocVectors;
+    private float[] magnitudes;
+    private final BytesRef magnitudesBytes;
+
+    public ByteMultiDenseVector(Iterator<byte[]> vectorValues, BytesRef magnitudesBytes, int numVecs, int dims) {
+        assert magnitudesBytes.length == numVecs * Float.BYTES;
+        this.vectorValues = vectorValues;
+        this.numVecs = numVecs;
+        this.dims = dims;
+        this.magnitudesBytes = magnitudesBytes;
+    }
+
+    @Override
+    public Iterator<float[]> getVectors() {
+        if (floatDocVectors == null) {
+            floatDocVectors = new ByteToFloatIteratorWrapper(vectorValues, dims);
+        }
+        return floatDocVectors;
+    }
+
+    @Override
+    public float[] getMagnitudes() {
+        if (magnitudes == null) {
+            magnitudes = VectorEncoderDecoder.getMultiMagnitudes(magnitudesBytes);
+        }
+        return magnitudes;
+    }
+
+    @Override
+    public boolean isEmpty() {
+        return false;
+    }
+
+    @Override
+    public int getDims() {
+        return dims;
+    }
+
+    @Override
+    public int size() {
+        return numVecs;
+    }
+
+    static class ByteToFloatIteratorWrapper implements Iterator<float[]> {
+        private final Iterator<byte[]> byteIterator;
+        private final float[] buffer;
+        private final int dims;
+
+        ByteToFloatIteratorWrapper(Iterator<byte[]> byteIterator, int dims) {
+            this.byteIterator = byteIterator;
+            this.buffer = new float[dims];
+            this.dims = dims;
+        }
+
+        @Override
+        public boolean hasNext() {
+            return byteIterator.hasNext();
+        }
+
+        @Override
+        public float[] next() {
+            byte[] next = byteIterator.next();
+            for (int i = 0; i < dims; i++) {
+                buffer[i] = next[i];
+            }
+            return buffer;
+        }
+    }
+}
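ByteToFloatIteratorWrapper, like the ByteVectorIterator and FloatVectorIterator classes that follow, returns the same backing array from every next() call, so callers that retain vectors across iterations must copy them. A minimal sketch of the safe pattern under that single assumption:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

final class CopyOnRetain {
    // The iterator reuses one buffer; copy each vector before storing a reference to it.
    static List<float[]> materialize(Iterator<float[]> vectors) {
        List<float[]> retained = new ArrayList<>();
        while (vectors.hasNext()) {
            float[] shared = vectors.next(); // same array instance on every call
            retained.add(Arrays.copyOf(shared, shared.length));
        }
        return retained;
    }
}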
diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/ByteMultiDenseVectorDocValuesField.java b/server/src/main/java/org/elasticsearch/script/field/vectors/ByteMultiDenseVectorDocValuesField.java
new file mode 100644
index 000000000000..d1e062e0a3de
--- /dev/null
+++ b/server/src/main/java/org/elasticsearch/script/field/vectors/ByteMultiDenseVectorDocValuesField.java
@@ -0,0 +1,142 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.script.field.vectors;
+
+import org.apache.lucene.index.BinaryDocValues;
+import org.apache.lucene.util.BytesRef;
+
+import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.ElementType;
+import org.elasticsearch.index.mapper.vectors.MultiDenseVectorScriptDocValues;
+
+import java.io.IOException;
+import java.util.Iterator;
+
+public class ByteMultiDenseVectorDocValuesField extends MultiDenseVectorDocValuesField {
+
+    protected final BinaryDocValues input;
+    private final BinaryDocValues magnitudes;
+    protected final int dims;
+    protected int numVecs;
+    protected Iterator<byte[]> vectorValue;
+    protected boolean decoded;
+    protected BytesRef value;
+    protected BytesRef magnitudesValue;
+    private byte[] buffer;
+
+    public ByteMultiDenseVectorDocValuesField(
+        BinaryDocValues input,
+        BinaryDocValues magnitudes,
+        String name,
+        ElementType elementType,
+        int dims
+    ) {
+        super(name, elementType);
+        this.input = input;
+        this.dims = dims;
+        this.buffer = new byte[dims];
+        this.magnitudes = magnitudes;
+    }
+
+    @Override
+    public void setNextDocId(int docId) throws IOException {
+        decoded = false;
+        if (input.advanceExact(docId)) {
+            boolean magnitudesFound = magnitudes.advanceExact(docId);
+            assert magnitudesFound;
+            value = input.binaryValue();
+            assert value.length % dims == 0;
+            numVecs = value.length / dims;
+            magnitudesValue = magnitudes.binaryValue();
+            assert magnitudesValue.length == (numVecs * Float.BYTES);
+        } else {
+            value = null;
+            magnitudesValue = null;
+            vectorValue = null;
+            numVecs = 0;
+        }
+    }
+
+    @Override
+    public MultiDenseVectorScriptDocValues toScriptDocValues() {
+        return new MultiDenseVectorScriptDocValues(this, dims);
+    }
+
+    protected MultiDenseVector getVector() {
+        return new ByteMultiDenseVector(vectorValue, magnitudesValue, numVecs, dims);
+    }
+
+    @Override
+    public MultiDenseVector get() {
+        if (isEmpty()) {
+            return MultiDenseVector.EMPTY;
+        }
+        decodeVectorIfNecessary();
+        return getVector();
+    }
+
+    @Override
+    public MultiDenseVector get(MultiDenseVector defaultValue) {
+        if (isEmpty()) {
+            return defaultValue;
+        }
+        decodeVectorIfNecessary();
+        return getVector();
+    }
+
+    @Override
+    public MultiDenseVector getInternal() {
+        return get(null);
+    }
+
+    private void decodeVectorIfNecessary() {
+        if (decoded == false && value != null) {
+            vectorValue = new ByteVectorIterator(value, buffer, numVecs);
+            decoded = true;
+        }
+    }
+
+    @Override
+    public int size() {
+        return value == null ? 0 : value.length / dims;
+    }
+
+    @Override
+    public boolean isEmpty() {
+        return value == null;
+    }
+
+    static class ByteVectorIterator implements Iterator<byte[]> {
+        private final byte[] buffer;
+        private final BytesRef vectorValues;
+        private final int size;
+        private int idx = 0;
+
+        ByteVectorIterator(BytesRef vectorValues, byte[] buffer, int size) {
+            assert vectorValues.length == (buffer.length * size);
+            this.vectorValues = vectorValues;
+            this.size = size;
+            this.buffer = buffer;
+        }
+
+        @Override
+        public boolean hasNext() {
+            return idx < size;
+        }
+
+        @Override
+        public byte[] next() {
+            if (hasNext() == false) {
+                throw new IllegalArgumentException("No more elements in the iterator");
+            }
+            System.arraycopy(vectorValues.bytes, vectorValues.offset + idx * buffer.length, buffer, 0, buffer.length);
+            idx++;
+            return buffer;
+        }
+    }
+}
diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/FloatMultiDenseVector.java b/server/src/main/java/org/elasticsearch/script/field/vectors/FloatMultiDenseVector.java
new file mode 100644
index 000000000000..9ffe8b3b970c
--- /dev/null
+++ b/server/src/main/java/org/elasticsearch/script/field/vectors/FloatMultiDenseVector.java
@@ -0,0 +1,61 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.script.field.vectors;
+
+import org.apache.lucene.util.BytesRef;
+
+import java.util.Iterator;
+
+import static org.elasticsearch.index.mapper.vectors.VectorEncoderDecoder.getMultiMagnitudes;
+
+public class FloatMultiDenseVector implements MultiDenseVector {
+
+    private final BytesRef magnitudes;
+    private float[] magnitudesArray = null;
+    private final int dims;
+    private final int numVectors;
+    private final Iterator<float[]> decodedDocVector;
+
+    public FloatMultiDenseVector(Iterator<float[]> decodedDocVector, BytesRef magnitudes, int numVectors, int dims) {
+        assert magnitudes.length == numVectors * Float.BYTES;
+        this.decodedDocVector = decodedDocVector;
+        this.magnitudes = magnitudes;
+        this.numVectors = numVectors;
+        this.dims = dims;
+    }
+
+    @Override
+    public Iterator<float[]> getVectors() {
+        return decodedDocVector;
+    }
+
+    @Override
+    public float[] getMagnitudes() {
+        if (magnitudesArray == null) {
+            magnitudesArray = getMultiMagnitudes(magnitudes);
+        }
+        return magnitudesArray;
+    }
+
+    @Override
+    public boolean isEmpty() {
+        return false;
+    }
+
+    @Override
+    public int getDims() {
+        return dims;
+    }
+
+    @Override
+    public int size() {
+        return numVectors;
+    }
+}
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.script.field.vectors; + +import org.apache.lucene.index.BinaryDocValues; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.ElementType; +import org.elasticsearch.index.mapper.vectors.MultiDenseVectorScriptDocValues; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.nio.FloatBuffer; +import java.util.Iterator; + +public class FloatMultiDenseVectorDocValuesField extends MultiDenseVectorDocValuesField { + + private final BinaryDocValues input; + private final BinaryDocValues magnitudes; + private boolean decoded; + private final int dims; + private BytesRef value; + private BytesRef magnitudesValue; + private FloatVectorIterator vectorValues; + private int numVectors; + private float[] buffer; + + public FloatMultiDenseVectorDocValuesField( + BinaryDocValues input, + BinaryDocValues magnitudes, + String name, + ElementType elementType, + int dims + ) { + super(name, elementType); + this.input = input; + this.magnitudes = magnitudes; + this.dims = dims; + this.buffer = new float[dims]; + } + + @Override + public void setNextDocId(int docId) throws IOException { + decoded = false; + if (input.advanceExact(docId)) { + boolean magnitudesFound = magnitudes.advanceExact(docId); + assert magnitudesFound; + + value = input.binaryValue(); + assert value.length % (Float.BYTES * dims) == 0; + numVectors = value.length / (Float.BYTES * dims); + magnitudesValue = magnitudes.binaryValue(); + assert magnitudesValue.length == (Float.BYTES * numVectors); + } else { + value = null; + magnitudesValue = null; + numVectors = 0; + } + } + + @Override + public MultiDenseVectorScriptDocValues toScriptDocValues() { + return new MultiDenseVectorScriptDocValues(this, dims); + } + + @Override + public boolean isEmpty() { + return value == null; + } + + @Override + public MultiDenseVector get() { + if (isEmpty()) { + return MultiDenseVector.EMPTY; + } + decodeVectorIfNecessary(); + return new FloatMultiDenseVector(vectorValues, magnitudesValue, numVectors, dims); + } + + @Override + public MultiDenseVector get(MultiDenseVector defaultValue) { + if (isEmpty()) { + return defaultValue; + } + decodeVectorIfNecessary(); + return new FloatMultiDenseVector(vectorValues, magnitudesValue, numVectors, dims); + } + + @Override + public MultiDenseVector getInternal() { + return get(null); + } + + @Override + public int size() { + return value == null ? 
+    @Override
+    public int size() {
+        return value == null ? 0 : value.length / (Float.BYTES * dims);
+    }
+
+    private void decodeVectorIfNecessary() {
+        if (decoded == false && value != null) {
+            vectorValues = new FloatVectorIterator(value, buffer, numVectors);
+            decoded = true;
+        }
+    }
+
+    static class FloatVectorIterator implements Iterator<float[]> {
+        private final float[] buffer;
+        private final FloatBuffer vectorValues;
+        private final int size;
+        private int idx = 0;
+
+        FloatVectorIterator(BytesRef vectorValues, float[] buffer, int size) {
+            assert vectorValues.length == (buffer.length * Float.BYTES * size);
+            this.vectorValues = ByteBuffer.wrap(vectorValues.bytes, vectorValues.offset, vectorValues.length)
+                .order(ByteOrder.LITTLE_ENDIAN)
+                .asFloatBuffer();
+            this.size = size;
+            this.buffer = buffer;
+        }
+
+        @Override
+        public boolean hasNext() {
+            return idx < size;
+        }
+
+        @Override
+        public float[] next() {
+            if (hasNext() == false) {
+                throw new IllegalArgumentException("No more elements in the iterator");
+            }
+            vectorValues.get(buffer);
+            idx++;
+            return buffer;
+        }
+    }
+}
diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/MultiDenseVector.java b/server/src/main/java/org/elasticsearch/script/field/vectors/MultiDenseVector.java
new file mode 100644
index 000000000000..85c851dbe545
--- /dev/null
+++ b/server/src/main/java/org/elasticsearch/script/field/vectors/MultiDenseVector.java
@@ -0,0 +1,71 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.script.field.vectors;
+
+import java.util.Iterator;
+
+public interface MultiDenseVector {
+
+    default void checkDimensions(int qvDims) {
+        checkDimensions(getDims(), qvDims);
+    }
+
+    Iterator<float[]> getVectors();
+
+    float[] getMagnitudes();
+
+    boolean isEmpty();
+
+    int getDims();
+
+    int size();
+
+    static void checkDimensions(int dvDims, int qvDims) {
+        if (dvDims != qvDims) {
+            throw new IllegalArgumentException(
+                "The query vector has a different number of dimensions [" + qvDims + "] than the document vectors [" + dvDims + "]."
+            );
+        }
+    }
+
+    private static String badQueryVectorType(Object queryVector) {
+        return "Cannot use vector [" + queryVector + "] with class [" + queryVector.getClass().getName() + "] as query vector";
+    }
+
+    MultiDenseVector EMPTY = new MultiDenseVector() {
+        public static final String MISSING_VECTOR_FIELD_MESSAGE = "Multi Dense vector value missing for a field,"
+            + " use isEmpty() to check for a missing vector value";
+
+        @Override
+        public Iterator<float[]> getVectors() {
+            throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE);
+        }
+
+        @Override
+        public float[] getMagnitudes() {
+            throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE);
+        }
+
+        @Override
+        public boolean isEmpty() {
+            return true;
+        }
+
+        @Override
+        public int getDims() {
+            throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE);
+        }
+
+        @Override
+        public int size() {
+            return 0;
+        }
+    };
+}
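Script code is expected to check isEmpty() before touching a document's value; the EMPTY sentinel above makes a missing multi-vector explicit rather than null, and its accessors throw if used. An illustrative guard (the method and its use of the field are hypothetical):

// Sums the per-vector magnitudes of a doc, treating missing values as zero.
static float sumOfMagnitudes(MultiDenseVectorDocValuesField field) {
    MultiDenseVector vector = field.get(); // MultiDenseVector.EMPTY when the doc has no value
    if (vector.isEmpty()) {
        return 0f; // never call getMagnitudes()/getVectors() on EMPTY; they throw IllegalArgumentException
    }
    float sum = 0f;
    for (float magnitude : vector.getMagnitudes()) {
        sum += magnitude;
    }
    return sum;
}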
diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/MultiDenseVectorDocValuesField.java b/server/src/main/java/org/elasticsearch/script/field/vectors/MultiDenseVectorDocValuesField.java
new file mode 100644
index 000000000000..61ae4304683c
--- /dev/null
+++ b/server/src/main/java/org/elasticsearch/script/field/vectors/MultiDenseVectorDocValuesField.java
@@ -0,0 +1,57 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.script.field.vectors;
+
+import org.elasticsearch.index.mapper.vectors.MultiDenseVectorScriptDocValues;
+import org.elasticsearch.script.field.AbstractScriptFieldFactory;
+import org.elasticsearch.script.field.DocValuesScriptFieldFactory;
+import org.elasticsearch.script.field.Field;
+
+import java.util.Iterator;
+
+import static org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.ElementType;
+
+public abstract class MultiDenseVectorDocValuesField extends AbstractScriptFieldFactory<MultiDenseVector>
+    implements
+        Field<MultiDenseVector>,
+        DocValuesScriptFieldFactory,
+        MultiDenseVectorScriptDocValues.MultiDenseVectorSupplier {
+
+    protected final String name;
+    protected final ElementType elementType;
+
+    public MultiDenseVectorDocValuesField(String name, ElementType elementType) {
+        this.name = name;
+        this.elementType = elementType;
+    }
+
+    @Override
+    public String getName() {
+        return name;
+    }
+
+    public ElementType getElementType() {
+        return elementType;
+    }
+
+    /**
+     * Get the MultiDenseVector for a document if one exists, MultiDenseVector.EMPTY otherwise
+     */
+    public abstract MultiDenseVector get();
+
+    public abstract MultiDenseVector get(MultiDenseVector defaultValue);
+
+    public abstract MultiDenseVectorScriptDocValues toScriptDocValues();
+
+    // MultiDenseVector fields are single valued, so Iterable does not make sense.
+    @Override
+    public Iterator<MultiDenseVector> iterator() {
+        throw new UnsupportedOperationException("Cannot iterate over single valued multi_dense_vector field, use get() instead");
+    }
+}
diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java
index 6bc667d4359b..546586a9ff3c 100644
--- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java
+++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java
@@ -191,7 +191,16 @@ protected SearchHit nextDoc(int doc) throws IOException {
             }
         };
 
-        SearchHit[] hits = docsIterator.iterate(context.shardTarget(), context.searcher().getIndexReader(), docIdsToLoad);
+        SearchHit[] hits = docsIterator.iterate(
+            context.shardTarget(),
+            context.searcher().getIndexReader(),
+            docIdsToLoad,
+            context.request().allowPartialSearchResults()
+        );
+
+        if (docsIterator.isTimedOut()) {
+            context.queryResult().searchTimedOut(true);
+        }
 
         if (context.isCancelled()) {
             for (SearchHit hit : hits) {
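With this, the fetch phase follows the same timeout contract as the query phase: if partial results are allowed, the timeout is recorded and whatever was fetched so far is returned; otherwise the shard request fails. Condensed from the FetchPhaseDocsIterator change below (fetchNextHit and hitsFetchedSoFar are placeholders for the real loop state):

try {
    fetchNextHit();
} catch (ContextIndexSearcher.TimeExceededException e) {
    if (allowPartialResults) {
        timedOut = true;         // surfaced via isTimedOut() and copied into the query result above
        return hitsFetchedSoFar; // ownership of the partial hits transfers to the caller
    }
    purgeSearchHits(searchHits); // release refs on abandoned hits before failing the shard
    throw new SearchTimeoutException(shardTarget, "Time exceeded");
}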
diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseDocsIterator.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseDocsIterator.java
index 682ee4b37566..df4e7649ffd3 100644
--- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseDocsIterator.java
+++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseDocsIterator.java
@@ -13,7 +13,10 @@
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.ReaderUtil;
 import org.elasticsearch.search.SearchHit;
+import org.elasticsearch.search.SearchHits;
 import org.elasticsearch.search.SearchShardTarget;
+import org.elasticsearch.search.internal.ContextIndexSearcher;
+import org.elasticsearch.search.query.SearchTimeoutException;
 
 import java.io.IOException;
 import java.util.Arrays;
@@ -27,6 +30,12 @@
  */
 abstract class FetchPhaseDocsIterator {
 
+    private boolean timedOut = false;
+
+    public boolean isTimedOut() {
+        return timedOut;
+    }
+
     /**
      * Called when a new leaf reader is reached
     * @param ctx the leaf reader for this set of doc ids
@@ -44,7 +53,7 @@ abstract class FetchPhaseDocsIterator {
     /**
      * Iterate over a set of docsIds within a particular shard and index reader
     */
-    public final SearchHit[] iterate(SearchShardTarget shardTarget, IndexReader indexReader, int[] docIds) {
+    public final SearchHit[] iterate(SearchShardTarget shardTarget, IndexReader indexReader, int[] docIds, boolean allowPartialResults) {
         SearchHit[] searchHits = new SearchHit[docIds.length];
         DocIdToIndex[] docs = new DocIdToIndex[docIds.length];
         for (int index = 0; index < docIds.length; index++) {
@@ -58,30 +67,55 @@ public final SearchHit[] iterate(SearchShardTarget shardTarget, IndexReader inde
             LeafReaderContext ctx = indexReader.leaves().get(leafOrd);
             int endReaderIdx = endReaderIdx(ctx, 0, docs);
             int[] docsInLeaf = docIdsInLeaf(0, endReaderIdx, docs, ctx.docBase);
-            setNextReader(ctx, docsInLeaf);
-            for (int i = 0; i < docs.length; i++) {
-                if (i >= endReaderIdx) {
-                    leafOrd = ReaderUtil.subIndex(docs[i].docId, indexReader.leaves());
-                    ctx = indexReader.leaves().get(leafOrd);
-                    endReaderIdx = endReaderIdx(ctx, i, docs);
-                    docsInLeaf = docIdsInLeaf(i, endReaderIdx, docs, ctx.docBase);
-                    setNextReader(ctx, docsInLeaf);
+            try {
+                setNextReader(ctx, docsInLeaf);
+            } catch (ContextIndexSearcher.TimeExceededException timeExceededException) {
+                if (allowPartialResults) {
+                    timedOut = true;
+                    return SearchHits.EMPTY;
                 }
-                currentDoc = docs[i].docId;
-                assert searchHits[docs[i].index] == null;
-                searchHits[docs[i].index] = nextDoc(docs[i].docId);
+                throw new SearchTimeoutException(shardTarget, "Time exceeded");
             }
-        } catch (Exception e) {
-            for (SearchHit searchHit : searchHits) {
-                if (searchHit != null) {
-                    searchHit.decRef();
+            for (int i = 0; i < docs.length; i++) {
+                try {
+                    if (i >= endReaderIdx) {
+                        leafOrd = ReaderUtil.subIndex(docs[i].docId, indexReader.leaves());
+                        ctx = indexReader.leaves().get(leafOrd);
+                        endReaderIdx = endReaderIdx(ctx, i, docs);
+                        docsInLeaf = docIdsInLeaf(i, endReaderIdx, docs, ctx.docBase);
+                        setNextReader(ctx, docsInLeaf);
+                    }
+                    currentDoc = docs[i].docId;
+                    assert searchHits[docs[i].index] == null;
+                    searchHits[docs[i].index] = nextDoc(docs[i].docId);
+                } catch (ContextIndexSearcher.TimeExceededException timeExceededException) {
+                    if (allowPartialResults) {
+                        timedOut = true;
+                        SearchHit[] partialSearchHits = new SearchHit[i];
+                        System.arraycopy(searchHits, 0, partialSearchHits, 0, i);
+                        return partialSearchHits;
+                    }
+                    purgeSearchHits(searchHits);
+                    throw new SearchTimeoutException(shardTarget, "Time exceeded");
                 }
             }
+        } catch (SearchTimeoutException e) {
+            throw e;
+        } catch (Exception e) {
+            purgeSearchHits(searchHits);
             throw new FetchPhaseExecutionException(shardTarget, "Error running fetch phase for doc [" + currentDoc + "]", e);
         }
         return searchHits;
     }
 
+    private static void purgeSearchHits(SearchHit[] searchHits) {
+        for (SearchHit searchHit : searchHits) {
+            if (searchHit != null) {
+                searchHit.decRef();
+            }
+        }
+    }
+
     private static int endReaderIdx(LeafReaderContext currentReaderContext, int index, DocIdToIndex[] docs) {
         int firstInNextReader = currentReaderContext.docBase + currentReaderContext.reader().maxDoc();
         int i = index + 1;
diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java
index e14177adba46..a8db0f26d296 100644
--- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java
@@ -64,7 +64,6 @@ public abstract class AbstractHighlighterBuilder<HB extends AbstractHighlighterBuilder<?>> BiFunction
-        {}, FORCE_SOURCE_FIELD); // force_source is ignored
         parser.declareInt(HB::phraseLimit, PHRASE_LIMIT_FIELD);
         parser.declareInt(HB::maxAnalyzedOffset, MAX_ANALYZED_OFFSET_FIELD);
         parser.declareObject(HB::options, (XContentParser p, Void c) -> {
diff --git a/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification b/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification
index 5cd8935f7240..3955fc87bf39 100644
--- a/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification
+++ b/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification
@@ -7,13 +7,12 @@
 # License v3.0 only", or the "Server Side Public License, v 1".
 #
+org.elasticsearch.action.admin.indices.stats.IndicesStatsFeatures
 org.elasticsearch.action.bulk.BulkFeatures
 org.elasticsearch.features.FeatureInfrastructureFeatures
 org.elasticsearch.health.HealthFeatures
-org.elasticsearch.cluster.service.TransportFeatures
 org.elasticsearch.cluster.metadata.MetadataFeatures
 org.elasticsearch.rest.RestFeatures
-org.elasticsearch.indices.IndicesFeatures
 org.elasticsearch.repositories.RepositoriesFeatures
 org.elasticsearch.action.admin.cluster.allocation.AllocationStatsFeatures
 org.elasticsearch.rest.action.admin.cluster.ClusterRerouteFeatures
diff --git a/server/src/main/resources/org/elasticsearch/common/reference-docs-links.txt b/server/src/main/resources/org/elasticsearch/common/reference-docs-links.txt
index f9a8237d6371..69aa5102dec8 100644
--- a/server/src/main/resources/org/elasticsearch/common/reference-docs-links.txt
+++ b/server/src/main/resources/org/elasticsearch/common/reference-docs-links.txt
@@ -44,3 +44,4 @@ FORMING_SINGLE_NODE_CLUSTERS modules-discover
 CIRCUIT_BREAKER_ERRORS circuit-breaker-errors.html
 ALLOCATION_EXPLAIN_NO_COPIES cluster-allocation-explain.html#no-valid-shard-copy
 ALLOCATION_EXPLAIN_MAX_RETRY cluster-allocation-explain.html#maximum-number-of-retries-exceeded
+SECURE_SETTINGS secure-settings.html
diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java
index 0b9cba837583..5cf7b438b41a 100644
--- a/server/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java
+++ b/server/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java
@@ -42,6 +42,7 @@
 
 import java.io.IOException;
 import java.io.Reader;
+import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
 
@@ -250,6 +251,32 @@ public void testFillsAttributes() throws IOException {
         assertEquals("<ALPHANUM>", tokens.get(3).getType());
     }
 
+    public void testAnalyzerWithTwoTextsAndNoIndexName() throws IOException {
+        AnalyzeAction.Request request = new AnalyzeAction.Request();
+
+        for (String analyzer : Arrays.asList("standard", "simple", "stop", "keyword", "whitespace", "classic")) {
+            request.analyzer(analyzer);
+            request.text("a a", "b b");
+
+            AnalyzeAction.Response analyzeIndex = TransportAnalyzeAction.analyze(request, registry, mockIndexService(), maxTokenCount);
+            List<AnalyzeAction.AnalyzeToken> tokensIndex = analyzeIndex.getTokens();
+
+            AnalyzeAction.Response analyzeNoIndex = TransportAnalyzeAction.analyze(request, registry, null, maxTokenCount);
+            List<AnalyzeAction.AnalyzeToken> tokensNoIndex = analyzeNoIndex.getTokens();
+
+            assertEquals(tokensIndex.size(), tokensNoIndex.size());
+            for (int i = 0; i < tokensIndex.size(); i++) {
+                AnalyzeAction.AnalyzeToken withIndex = tokensIndex.get(i);
+                AnalyzeAction.AnalyzeToken withNoIndex = tokensNoIndex.get(i);
+
+                assertEquals(withIndex.getStartOffset(), withNoIndex.getStartOffset());
+                assertEquals(withIndex.getEndOffset(), withNoIndex.getEndOffset());
+                assertEquals(withIndex.getPosition(), withNoIndex.getPosition());
+                assertEquals(withIndex.getType(), withNoIndex.getType());
+            }
+        }
+    }
+
     public void testWithIndexAnalyzers() throws IOException {
         AnalyzeAction.Request request = new AnalyzeAction.Request();
         request.text("the quick brown fox");
diff --git a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java
index 762a7e0f47ca..dda20dfb37e9 100644
--- a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java
+++ b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java
@@ -8,35 +8,65 @@
  */
 package org.elasticsearch.action.search;
 
+import org.apache.lucene.document.Document;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.QueryCachingPolicy;
 import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.search.TotalHits;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.tests.index.RandomIndexWriter;
 import org.apache.lucene.tests.store.MockDirectoryWrapper;
+import org.apache.lucene.util.Accountable;
 import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.OriginalIndices;
+import org.elasticsearch.cluster.metadata.IndexMetadata;
 import org.elasticsearch.common.UUIDs;
 import org.elasticsearch.common.breaker.CircuitBreaker;
 import org.elasticsearch.common.breaker.NoopCircuitBreaker;
 import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore;
+import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.concurrent.AtomicArray;
 import org.elasticsearch.common.util.concurrent.EsExecutors;
+import org.elasticsearch.index.IndexSettings;
+import org.elasticsearch.index.IndexVersion;
+import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
+import org.elasticsearch.index.mapper.IdLoader;
+import org.elasticsearch.index.mapper.MapperMetrics;
+import org.elasticsearch.index.mapper.MappingLookup;
+import org.elasticsearch.index.query.SearchExecutionContext;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.search.SearchHits;
 import org.elasticsearch.search.SearchPhaseResult;
 import org.elasticsearch.search.SearchShardTarget;
+import org.elasticsearch.search.fetch.FetchPhase;
 import org.elasticsearch.search.fetch.FetchSearchResult;
+import org.elasticsearch.search.fetch.FetchSubPhase;
+import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
 import org.elasticsearch.search.fetch.QueryFetchSearchResult;
 import org.elasticsearch.search.fetch.ShardFetchSearchRequest;
+import org.elasticsearch.search.fetch.StoredFieldsSpec;
+import org.elasticsearch.search.internal.AliasFilter;
+import org.elasticsearch.search.internal.ContextIndexSearcher;
+import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.search.internal.ShardSearchContextId;
+import org.elasticsearch.search.internal.ShardSearchRequest;
 import org.elasticsearch.search.profile.ProfileResult;
 import org.elasticsearch.search.profile.SearchProfileQueryPhaseResult;
 import org.elasticsearch.search.profile.SearchProfileShardResult;
 import org.elasticsearch.search.query.QuerySearchResult;
+import org.elasticsearch.search.query.SearchTimeoutException;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.InternalAggregationTestCase;
+import org.elasticsearch.test.TestSearchContext;
 import org.elasticsearch.transport.Transport;
 
+import java.io.IOException;
+import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.CountDownLatch;
@@ -749,4 +779,159 @@ private static void addProfiling(boolean profiled, QuerySearchResult queryResult
 
     private static ProfileResult fetchProfile(boolean profiled) {
new ProfileResult("fetch", "fetch", Map.of(), Map.of(), FETCH_PROFILE_TIME, List.of()) : null; } + + public void testFetchTimeoutWithPartialResults() throws IOException { + Directory dir = newDirectory(); + RandomIndexWriter w = new RandomIndexWriter(random(), dir); + w.addDocument(new Document()); + w.addDocument(new Document()); + w.addDocument(new Document()); + IndexReader r = w.getReader(); + w.close(); + ContextIndexSearcher contextIndexSearcher = createSearcher(r); + try (SearchContext searchContext = createSearchContext(contextIndexSearcher, true)) { + FetchPhase fetchPhase = createFetchPhase(contextIndexSearcher); + fetchPhase.execute(searchContext, new int[] { 0, 1, 2 }, null); + assertTrue(searchContext.queryResult().searchTimedOut()); + assertEquals(1, searchContext.fetchResult().hits().getHits().length); + } finally { + r.close(); + dir.close(); + } + } + + public void testFetchTimeoutNoPartialResults() throws IOException { + Directory dir = newDirectory(); + RandomIndexWriter w = new RandomIndexWriter(random(), dir); + w.addDocument(new Document()); + w.addDocument(new Document()); + w.addDocument(new Document()); + IndexReader r = w.getReader(); + w.close(); + ContextIndexSearcher contextIndexSearcher = createSearcher(r); + + try (SearchContext searchContext = createSearchContext(contextIndexSearcher, false)) { + FetchPhase fetchPhase = createFetchPhase(contextIndexSearcher); + expectThrows(SearchTimeoutException.class, () -> fetchPhase.execute(searchContext, new int[] { 0, 1, 2 }, null)); + assertNull(searchContext.fetchResult().hits()); + } finally { + r.close(); + dir.close(); + } + } + + private static ContextIndexSearcher createSearcher(IndexReader reader) throws IOException { + return new ContextIndexSearcher(reader, null, null, new QueryCachingPolicy() { + @Override + public void onUse(Query query) {} + + @Override + public boolean shouldCache(Query query) { + return false; + } + }, randomBoolean()); + } + + private static FetchPhase createFetchPhase(ContextIndexSearcher contextIndexSearcher) { + return new FetchPhase(Collections.singletonList(fetchContext -> new FetchSubPhaseProcessor() { + boolean processCalledOnce = false; + + @Override + public void setNextReader(LeafReaderContext readerContext) {} + + @Override + public void process(FetchSubPhase.HitContext hitContext) { + // we throw only once one doc has been fetched, so we can test partial results are returned + if (processCalledOnce) { + contextIndexSearcher.throwTimeExceededException(); + } else { + processCalledOnce = true; + } + } + + @Override + public StoredFieldsSpec storedFieldsSpec() { + return StoredFieldsSpec.NO_REQUIREMENTS; + } + })); + } + + private static SearchContext createSearchContext(ContextIndexSearcher contextIndexSearcher, boolean allowPartialResults) { + IndexSettings indexSettings = new IndexSettings( + IndexMetadata.builder("index") + .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current())) + .numberOfShards(1) + .numberOfReplicas(0) + .creationDate(System.currentTimeMillis()) + .build(), + Settings.EMPTY + ); + BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(indexSettings, new BitsetFilterCache.Listener() { + @Override + public void onCache(ShardId shardId, Accountable accountable) { + + } + + @Override + public void onRemoval(ShardId shardId, Accountable accountable) { + + } + }); + + SearchExecutionContext searchExecutionContext = new SearchExecutionContext( + 0, + 0, + indexSettings, + bitsetFilterCache, + null, + null, + 
+            MappingLookup.EMPTY,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            Collections.emptyMap(),
+            null,
+            MapperMetrics.NOOP
+        );
+        TestSearchContext searchContext = new TestSearchContext(searchExecutionContext, null, contextIndexSearcher) {
+            private final FetchSearchResult fetchSearchResult = new FetchSearchResult();
+            private final ShardSearchRequest request = new ShardSearchRequest(
+                OriginalIndices.NONE,
+                new SearchRequest().allowPartialSearchResults(allowPartialResults),
+                new ShardId("index", "indexUUID", 0),
+                0,
+                1,
+                AliasFilter.EMPTY,
+                1f,
+                0L,
+                null
+            );
+
+            @Override
+            public IdLoader newIdLoader() {
+                return new IdLoader.StoredIdLoader();
+            }
+
+            @Override
+            public FetchSearchResult fetchResult() {
+                return fetchSearchResult;
+            }
+
+            @Override
+            public ShardSearchRequest request() {
+                return request;
+            }
+        };
+        searchContext.addReleasable(searchContext.fetchResult()::decRef);
+        searchContext.setTask(new SearchShardTask(-1, "type", "action", "description", null, Collections.emptyMap()));
+        return searchContext;
+    }
 }
diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/IndexRoutingTableTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/IndexRoutingTableTests.java
index 21b30557cafe..e5786b1b3449 100644
--- a/server/src/test/java/org/elasticsearch/cluster/routing/IndexRoutingTableTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/routing/IndexRoutingTableTests.java
@@ -9,6 +9,7 @@
 
 package org.elasticsearch.cluster.routing;
 
+import org.elasticsearch.TransportVersion;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.common.UUIDs;
 import org.elasticsearch.common.settings.Settings;
@@ -19,6 +20,7 @@
 
 import java.util.List;
 
+import static org.elasticsearch.TransportVersions.FAST_REFRESH_RCO_2;
 import static org.elasticsearch.index.IndexSettings.INDEX_FAST_REFRESH_SETTING;
 import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.Mockito.mock;
@@ -27,16 +29,22 @@ public class IndexRoutingTableTests extends ESTestCase {
 
     public void testReadyForSearch() {
-        innerReadyForSearch(false);
-        innerReadyForSearch(true);
+        innerReadyForSearch(false, false);
+        innerReadyForSearch(false, true);
+        innerReadyForSearch(true, false);
+        innerReadyForSearch(true, true);
     }
 
-    private void innerReadyForSearch(boolean fastRefresh) {
+    // TODO: remove if (fastRefresh && beforeFastRefreshRCO) branches (ES-9563)
+    private void innerReadyForSearch(boolean fastRefresh, boolean beforeFastRefreshRCO) {
         Index index = new Index(randomIdentifier(), UUIDs.randomBase64UUID());
         ClusterState clusterState = mock(ClusterState.class, Mockito.RETURNS_DEEP_STUBS);
         when(clusterState.metadata().index(any(Index.class)).getSettings()).thenReturn(
             Settings.builder().put(INDEX_FAST_REFRESH_SETTING.getKey(), fastRefresh).build()
         );
+        when(clusterState.getMinTransportVersion()).thenReturn(
TransportVersion.fromId(FAST_REFRESH_RCO_2.id() - 1_00_0) : TransportVersion.current() + ); // 2 primaries that are search and index ShardId p1 = new ShardId(index, 0); IndexShardRoutingTable shardTable1 = new IndexShardRoutingTable( @@ -55,7 +63,7 @@ private void innerReadyForSearch(boolean fastRefresh) { shardTable1 = new IndexShardRoutingTable(p1, List.of(getShard(p1, true, ShardRoutingState.STARTED, ShardRouting.Role.INDEX_ONLY))); shardTable2 = new IndexShardRoutingTable(p2, List.of(getShard(p2, true, ShardRoutingState.STARTED, ShardRouting.Role.INDEX_ONLY))); indexRoutingTable = new IndexRoutingTable(index, new IndexShardRoutingTable[] { shardTable1, shardTable2 }); - if (fastRefresh) { + if (fastRefresh && beforeFastRefreshRCO) { assertTrue(indexRoutingTable.readyForSearch(clusterState)); } else { assertFalse(indexRoutingTable.readyForSearch(clusterState)); @@ -91,7 +99,7 @@ private void innerReadyForSearch(boolean fastRefresh) { ) ); indexRoutingTable = new IndexRoutingTable(index, new IndexShardRoutingTable[] { shardTable1, shardTable2 }); - if (fastRefresh) { + if (fastRefresh && beforeFastRefreshRCO) { assertTrue(indexRoutingTable.readyForSearch(clusterState)); } else { assertFalse(indexRoutingTable.readyForSearch(clusterState)); @@ -118,8 +126,6 @@ private void innerReadyForSearch(boolean fastRefresh) { assertTrue(indexRoutingTable.readyForSearch(clusterState)); // 2 unassigned primaries that are index only with some replicas that are all available - // Fast refresh indices do not support replicas so this can not practically happen. If we add support we will want to ensure - // that readyForSearch allows for searching replicas when the index shard is not available. shardTable1 = new IndexShardRoutingTable( p1, List.of( @@ -137,8 +143,8 @@ private void innerReadyForSearch(boolean fastRefresh) { ) ); indexRoutingTable = new IndexRoutingTable(index, new IndexShardRoutingTable[] { shardTable1, shardTable2 }); - if (fastRefresh) { - assertFalse(indexRoutingTable.readyForSearch(clusterState)); // if we support replicas for fast refreshes this needs to change + if (fastRefresh && beforeFastRefreshRCO) { + assertFalse(indexRoutingTable.readyForSearch(clusterState)); } else { assertTrue(indexRoutingTable.readyForSearch(clusterState)); } diff --git a/server/src/test/java/org/elasticsearch/cluster/service/TransportVersionsFixupListenerTests.java b/server/src/test/java/org/elasticsearch/cluster/service/TransportVersionsFixupListenerTests.java deleted file mode 100644 index 9eec8309bbb8..000000000000 --- a/server/src/test/java/org/elasticsearch/cluster/service/TransportVersionsFixupListenerTests.java +++ /dev/null @@ -1,313 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ - -package org.elasticsearch.cluster.service; - -import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; -import org.elasticsearch.Version; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; -import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; -import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; -import org.elasticsearch.client.internal.ClusterAdminClient; -import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.node.DiscoveryNodeUtils; -import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.service.TransportVersionsFixupListener.NodeTransportVersionTask; -import org.elasticsearch.cluster.version.CompatibilityVersions; -import org.elasticsearch.common.transport.TransportAddress; -import org.elasticsearch.common.util.Maps; -import org.elasticsearch.features.FeatureService; -import org.elasticsearch.indices.SystemIndexDescriptor; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.Scheduler; -import org.mockito.ArgumentCaptor; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.Executor; - -import static java.util.Map.entry; -import static org.elasticsearch.test.LambdaMatchers.transformedMatch; -import static org.hamcrest.Matchers.arrayContainingInAnyOrder; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.everyItem; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.ArgumentMatchers.same; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoMoreInteractions; -import static org.mockito.hamcrest.MockitoHamcrest.argThat; - -public class TransportVersionsFixupListenerTests extends ESTestCase { - - private static final Version NEXT_VERSION = Version.V_8_8_1; - private static final TransportVersion NEXT_TRANSPORT_VERSION = TransportVersion.fromId(NEXT_VERSION.id); - - @SuppressWarnings("unchecked") - private static MasterServiceTaskQueue<NodeTransportVersionTask> newMockTaskQueue() { - return mock(MasterServiceTaskQueue.class); - } - - private static DiscoveryNodes node(Version... versions) { - var builder = DiscoveryNodes.builder(); - for (int i = 0; i < versions.length; i++) { - builder.add(DiscoveryNodeUtils.create("node" + i, new TransportAddress(TransportAddress.META_ADDRESS, 9200 + i), versions[i])); - } - builder.localNodeId("node0").masterNodeId("node0"); - return builder.build(); - } - - @SafeVarargs - private static <T> Map<String, T> versions(T... versions) { - Map<String, T> tvs = new HashMap<>(); - for (int i = 0; i < versions.length; i++) { - tvs.put("node" + i, versions[i]); - } - return tvs; - } - - private static NodesInfoResponse getResponse(Map<String, CompatibilityVersions> responseData) { - return new NodesInfoResponse( - ClusterName.DEFAULT, - responseData.entrySet() - .stream() - .map( - e -> new NodeInfo( - "", - e.getValue(), - null, - null, - null, - DiscoveryNodeUtils.create(e.getKey(), new TransportAddress(TransportAddress.META_ADDRESS, 9200)), - null, - null, - null, - null, - null, - null, - null, - null, - null, - null, - null, - null - ) - ) - .toList(), - List.of() - ); - } - - public void testNothingFixedWhenNothingToInfer() { - MasterServiceTaskQueue<NodeTransportVersionTask> taskQueue = newMockTaskQueue(); - ClusterAdminClient client = mock(ClusterAdminClient.class); - - ClusterState testState = ClusterState.builder(ClusterState.EMPTY_STATE) - .nodes(node(Version.V_8_8_0)) - .nodeIdsToCompatibilityVersions(versions(new CompatibilityVersions(TransportVersions.V_8_8_0, Map.of()))) - .build(); - - TransportVersionsFixupListener listeners = new TransportVersionsFixupListener( - taskQueue, - client, - new FeatureService(List.of(new TransportFeatures())), - null, - null - ); - listeners.clusterChanged(new ClusterChangedEvent("test", testState, ClusterState.EMPTY_STATE)); - - verify(taskQueue, never()).submitTask(anyString(), any(), any()); - } - - public void testNothingFixedWhenOnNextVersion() { - MasterServiceTaskQueue<NodeTransportVersionTask> taskQueue = newMockTaskQueue(); - ClusterAdminClient client = mock(ClusterAdminClient.class); - - ClusterState testState = ClusterState.builder(ClusterState.EMPTY_STATE) - .nodes(node(NEXT_VERSION)) - .nodeIdsToCompatibilityVersions(versions(new CompatibilityVersions(NEXT_TRANSPORT_VERSION, Map.of()))) - .build(); - - TransportVersionsFixupListener listeners = new TransportVersionsFixupListener( - taskQueue, - client, - new FeatureService(List.of(new TransportFeatures())), - null, - null - ); - listeners.clusterChanged(new ClusterChangedEvent("test", testState, ClusterState.EMPTY_STATE)); - - verify(taskQueue, never()).submitTask(anyString(), any(), any()); - } - - public void testNothingFixedWhenOnPreviousVersion() { - MasterServiceTaskQueue<NodeTransportVersionTask> taskQueue = newMockTaskQueue(); - ClusterAdminClient client = mock(ClusterAdminClient.class); - - ClusterState testState = ClusterState.builder(ClusterState.EMPTY_STATE) - .nodes(node(Version.V_8_7_0, Version.V_8_8_0)) - .nodeIdsToCompatibilityVersions( - Maps.transformValues( - versions(TransportVersions.V_8_7_0, TransportVersions.V_8_8_0), - transportVersion -> new CompatibilityVersions(transportVersion, Map.of()) - ) - ) - .build(); - - TransportVersionsFixupListener listeners = new TransportVersionsFixupListener( - taskQueue, - client, - new FeatureService(List.of(new TransportFeatures())), - null, - null - ); - listeners.clusterChanged(new ClusterChangedEvent("test", testState, ClusterState.EMPTY_STATE)); - - verify(taskQueue, never()).submitTask(anyString(), any(), any()); - } - - @SuppressWarnings("unchecked") - public void testVersionsAreFixed() { - MasterServiceTaskQueue<NodeTransportVersionTask> taskQueue = newMockTaskQueue(); - ClusterAdminClient client = mock(ClusterAdminClient.class); - - ClusterState testState = ClusterState.builder(ClusterState.EMPTY_STATE) - .nodes(node(NEXT_VERSION, NEXT_VERSION, NEXT_VERSION)) - .nodeIdsToCompatibilityVersions( - Maps.transformValues( - versions(NEXT_TRANSPORT_VERSION, TransportVersions.V_8_8_0, TransportVersions.V_8_8_0), - transportVersion -> new CompatibilityVersions(transportVersion, Map.of()) -
) ) .build(); - - ArgumentCaptor<ActionListener<NodesInfoResponse>> action = ArgumentCaptor.forClass(ActionListener.class); - ArgumentCaptor<NodeTransportVersionTask> task = ArgumentCaptor.forClass(NodeTransportVersionTask.class); - - TransportVersionsFixupListener listeners = new TransportVersionsFixupListener( - taskQueue, - client, - new FeatureService(List.of(new TransportFeatures())), - null, - null - ); - listeners.clusterChanged(new ClusterChangedEvent("test", testState, ClusterState.EMPTY_STATE)); - verify(client).nodesInfo( - argThat(transformedMatch(NodesInfoRequest::nodesIds, arrayContainingInAnyOrder("node1", "node2"))), - action.capture() - ); - action.getValue() - .onResponse( - getResponse( - Map.ofEntries( - entry("node1", new CompatibilityVersions(NEXT_TRANSPORT_VERSION, Map.of())), - entry("node2", new CompatibilityVersions(NEXT_TRANSPORT_VERSION, Map.of())) - ) - ) - ); - verify(taskQueue).submitTask(anyString(), task.capture(), any()); - - assertThat(task.getValue().results().keySet(), equalTo(Set.of("node1", "node2"))); - assertThat(task.getValue().results().values(), everyItem(equalTo(NEXT_TRANSPORT_VERSION))); - } - - public void testConcurrentChangesDoNotOverlap() { - MasterServiceTaskQueue<NodeTransportVersionTask> taskQueue = newMockTaskQueue(); - ClusterAdminClient client = mock(ClusterAdminClient.class); - - ClusterState testState1 = ClusterState.builder(ClusterState.EMPTY_STATE) - .nodes(node(NEXT_VERSION, NEXT_VERSION, NEXT_VERSION)) - .nodeIdsToCompatibilityVersions( - Maps.transformValues( - versions(NEXT_TRANSPORT_VERSION, TransportVersions.V_8_8_0, TransportVersions.V_8_8_0), - transportVersion -> new CompatibilityVersions(transportVersion, Map.of()) - ) - ) - .build(); - - TransportVersionsFixupListener listeners = new TransportVersionsFixupListener( - taskQueue, - client, - new FeatureService(List.of(new TransportFeatures())), - null, - null - ); - listeners.clusterChanged(new ClusterChangedEvent("test", testState1, ClusterState.EMPTY_STATE)); - verify(client).nodesInfo(argThat(transformedMatch(NodesInfoRequest::nodesIds, arrayContainingInAnyOrder("node1", "node2"))), any()); - // don't send back the response yet - - ClusterState testState2 = ClusterState.builder(ClusterState.EMPTY_STATE) - .nodes(node(NEXT_VERSION, NEXT_VERSION, NEXT_VERSION)) - .nodeIdsToCompatibilityVersions( - Maps.transformValues( - versions(NEXT_TRANSPORT_VERSION, NEXT_TRANSPORT_VERSION, TransportVersions.V_8_8_0), - transportVersion -> new CompatibilityVersions(transportVersion, Map.of()) - ) - ) - .build(); - // should not send any requests - listeners.clusterChanged(new ClusterChangedEvent("test", testState2, testState1)); - verifyNoMoreInteractions(client); - } - - @SuppressWarnings("unchecked") - public void testFailedRequestsAreRetried() { - MasterServiceTaskQueue<NodeTransportVersionTask> taskQueue = newMockTaskQueue(); - ClusterAdminClient client = mock(ClusterAdminClient.class); - Scheduler scheduler = mock(Scheduler.class); - Executor executor = mock(Executor.class); - - var compatibilityVersions = new CompatibilityVersions( - TransportVersion.current(), - Map.of(".system-index-1", new SystemIndexDescriptor.MappingsVersion(1, 1234)) - ); - ClusterState testState1 = ClusterState.builder(ClusterState.EMPTY_STATE) - .nodes(node(Version.CURRENT, Version.CURRENT, Version.CURRENT)) - .nodeIdsToCompatibilityVersions( - Map.ofEntries( - entry("node0", compatibilityVersions), - entry("node1", new CompatibilityVersions(TransportVersions.V_8_8_0, Map.of())), - entry("node2", new CompatibilityVersions(TransportVersions.V_8_8_0, Map.of())) - ) - ) - .build(); - - ArgumentCaptor<ActionListener<NodesInfoResponse>> action = ArgumentCaptor.forClass(ActionListener.class); - ArgumentCaptor<Runnable> retry = ArgumentCaptor.forClass(Runnable.class); - - TransportVersionsFixupListener listeners = new TransportVersionsFixupListener( - taskQueue, - client, - new FeatureService(List.of(new TransportFeatures())), - scheduler, - executor - ); - listeners.clusterChanged(new ClusterChangedEvent("test", testState1, ClusterState.EMPTY_STATE)); - verify(client, times(1)).nodesInfo(any(), action.capture()); - // do response immediately - action.getValue().onFailure(new RuntimeException("failure")); - verify(scheduler).schedule(retry.capture(), any(), same(executor)); - - // running retry should cause another check - retry.getValue().run(); - verify(client, times(2)).nodesInfo( - argThat(transformedMatch(NodesInfoRequest::nodesIds, arrayContainingInAnyOrder("node1", "node2"))), - any() - ); - } -} diff --git a/server/src/test/java/org/elasticsearch/health/node/DiskHealthIndicatorServiceTests.java b/server/src/test/java/org/elasticsearch/health/node/DiskHealthIndicatorServiceTests.java index 6713042002fa..07aa9af3b403 100644 --- a/server/src/test/java/org/elasticsearch/health/node/DiskHealthIndicatorServiceTests.java +++ b/server/src/test/java/org/elasticsearch/health/node/DiskHealthIndicatorServiceTests.java @@ -29,7 +29,6 @@ import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.features.FeatureService; import org.elasticsearch.health.Diagnosis; -import org.elasticsearch.health.HealthFeatures; import org.elasticsearch.health.HealthIndicatorDetails; import org.elasticsearch.health.HealthIndicatorImpact; import org.elasticsearch.health.HealthIndicatorResult; @@ -1085,12 +1084,8 @@ static ClusterState createClusterState( Collection<DiscoveryNode> nodes, Map<String, Set<String>> indexNameToNodeIdsMap ) { - Map<String, Set<String>> features = new HashMap<>(); DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder(); - for (DiscoveryNode node : nodes) { - nodesBuilder = nodesBuilder.add(node); - features.put(node.getId(), Set.of(HealthFeatures.SUPPORTS_HEALTH.id())); - } + nodes.forEach(nodesBuilder::add); nodesBuilder.localNodeId(randomFrom(nodes).getId()); nodesBuilder.masterNodeId(randomFrom(nodes).getId()); ClusterBlocks.Builder clusterBlocksBuilder = new ClusterBlocks.Builder(); @@ -1125,7 +1120,6 @@ static ClusterState createClusterState( state.metadata(metadata.generateClusterUuidIfNeeded().build()); state.routingTable(routingTable.build()); state.blocks(clusterBlocksBuilder); - state.nodeFeatures(features); return state.build(); } diff --git a/server/src/test/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorServiceTests.java b/server/src/test/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorServiceTests.java index 7a578650b7cb..15ef2e150761 100644 --- a/server/src/test/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorServiceTests.java +++ b/server/src/test/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorServiceTests.java @@ -21,7 +21,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.features.FeatureService; -import org.elasticsearch.health.HealthFeatures; import org.elasticsearch.health.HealthIndicatorDetails; import org.elasticsearch.health.HealthStatus; import org.elasticsearch.health.metadata.HealthMetadata; @@ -451,11 +450,7 @@ private ClusterState createClusterState( metadata.put(idxMetadata); } - var features = Set.of(HealthFeatures.SUPPORTS_SHARDS_CAPACITY_INDICATOR.id()); -
return ClusterState.builder(clusterState) - .metadata(metadata) - .nodeFeatures(Map.of(dataNode.getId(), features, frozenNode.getId(), features)) - .build(); + return ClusterState.builder(clusterState).metadata(metadata).build(); } private static IndexMetadata.Builder createIndexInDataNode(int shards) { diff --git a/server/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java b/server/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java index 77ab66516692..997cb123dbf8 100644 --- a/server/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java +++ b/server/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java @@ -48,7 +48,6 @@ import java.util.concurrent.atomic.AtomicLong; import static org.elasticsearch.cluster.node.DiscoveryNode.STATELESS_ENABLED_SETTING_NAME; -import static org.elasticsearch.index.IndexSettings.INDEX_FAST_REFRESH_SETTING; import static org.elasticsearch.index.cache.bitset.BitsetFilterCache.INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -253,35 +252,21 @@ public void testShouldLoadRandomAccessFiltersEagerly() { for (var hasIndexRole : values) { for (var loadFiltersEagerly : values) { for (var isStateless : values) { - for (var fastRefresh : values) { - if (isStateless == false && fastRefresh) { - // fast refresh is only relevant for stateless indices - continue; - } - - boolean result = BitsetFilterCache.shouldLoadRandomAccessFiltersEagerly( - bitsetFilterCacheSettings(isStateless, hasIndexRole, loadFiltersEagerly, fastRefresh) - ); - if (isStateless) { - assertEquals(loadFiltersEagerly && ((hasIndexRole && fastRefresh) || hasIndexRole == false), result); - } else { - assertEquals(loadFiltersEagerly, result); - } + boolean result = BitsetFilterCache.shouldLoadRandomAccessFiltersEagerly( + bitsetFilterCacheSettings(isStateless, hasIndexRole, loadFiltersEagerly) + ); + if (isStateless) { + assertEquals(loadFiltersEagerly && hasIndexRole == false, result); + } else { + assertEquals(loadFiltersEagerly, result); } } } } } - private IndexSettings bitsetFilterCacheSettings( - boolean isStateless, - boolean hasIndexRole, - boolean loadFiltersEagerly, - boolean fastRefresh - ) { - var indexSettingsBuilder = Settings.builder() - .put(INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING.getKey(), loadFiltersEagerly) - .put(INDEX_FAST_REFRESH_SETTING.getKey(), fastRefresh); + private IndexSettings bitsetFilterCacheSettings(boolean isStateless, boolean hasIndexRole, boolean loadFiltersEagerly) { + var indexSettingsBuilder = Settings.builder().put(INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING.getKey(), loadFiltersEagerly); var nodeSettingsBuilder = Settings.builder() .putList( diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/MultiDenseVectorScriptDocValuesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/MultiDenseVectorScriptDocValuesTests.java new file mode 100644 index 000000000000..435baa477e74 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/MultiDenseVectorScriptDocValuesTests.java @@ -0,0 +1,374 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.index.mapper.vectors; + +import org.apache.lucene.index.BinaryDocValues; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.ElementType; +import org.elasticsearch.script.field.vectors.ByteMultiDenseVectorDocValuesField; +import org.elasticsearch.script.field.vectors.FloatMultiDenseVectorDocValuesField; +import org.elasticsearch.script.field.vectors.MultiDenseVector; +import org.elasticsearch.script.field.vectors.MultiDenseVectorDocValuesField; +import org.elasticsearch.test.ESTestCase; +import org.junit.BeforeClass; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.util.Iterator; + +import static org.hamcrest.Matchers.containsString; + +public class MultiDenseVectorScriptDocValuesTests extends ESTestCase { + + @BeforeClass + public static void setup() { + assumeTrue("Requires multi-dense vector support", MultiDenseVectorFieldMapper.FEATURE_FLAG.isEnabled()); + } + + public void testFloatGetVectorValueAndGetMagnitude() throws IOException { + int dims = 3; + float[][][] vectors = { { { 1, 1, 1 }, { 1, 1, 2 }, { 1, 1, 3 } }, { { 1, 0, 2 } } }; + float[][] expectedMagnitudes = { { 1.7320f, 2.4495f, 3.3166f }, { 2.2361f } }; + + BinaryDocValues docValues = wrap(vectors, ElementType.FLOAT); + BinaryDocValues magnitudeValues = wrap(expectedMagnitudes); + MultiDenseVectorDocValuesField field = new FloatMultiDenseVectorDocValuesField( + docValues, + magnitudeValues, + "test", + ElementType.FLOAT, + dims + ); + MultiDenseVectorScriptDocValues scriptDocValues = field.toScriptDocValues(); + for (int i = 0; i < vectors.length; i++) { + field.setNextDocId(i); + assertEquals(vectors[i].length, field.size()); + assertEquals(dims, scriptDocValues.dims()); + Iterator iterator = scriptDocValues.getVectorValues(); + float[] magnitudes = scriptDocValues.getMagnitudes(); + assertEquals(expectedMagnitudes[i].length, magnitudes.length); + for (int j = 0; j < vectors[i].length; j++) { + assertTrue(iterator.hasNext()); + assertArrayEquals(vectors[i][j], iterator.next(), 0.0001f); + assertEquals(expectedMagnitudes[i][j], magnitudes[j], 0.0001f); + } + } + } + + public void testByteGetVectorValueAndGetMagnitude() throws IOException { + int dims = 3; + float[][][] vectors = { { { 1, 1, 1 }, { 1, 1, 2 }, { 1, 1, 3 } }, { { 1, 0, 2 } } }; + float[][] expectedMagnitudes = { { 1.7320f, 2.4495f, 3.3166f }, { 2.2361f } }; + + BinaryDocValues docValues = wrap(vectors, ElementType.BYTE); + BinaryDocValues magnitudeValues = wrap(expectedMagnitudes); + MultiDenseVectorDocValuesField field = new ByteMultiDenseVectorDocValuesField( + docValues, + magnitudeValues, + "test", + ElementType.BYTE, + dims + ); + MultiDenseVectorScriptDocValues scriptDocValues = field.toScriptDocValues(); + for (int i = 0; i < vectors.length; i++) { + field.setNextDocId(i); + assertEquals(vectors[i].length, field.size()); + assertEquals(dims, scriptDocValues.dims()); + Iterator iterator = scriptDocValues.getVectorValues(); + float[] magnitudes = scriptDocValues.getMagnitudes(); + assertEquals(expectedMagnitudes[i].length, 
magnitudes.length); + for (int j = 0; j < vectors[i].length; j++) { + assertTrue(iterator.hasNext()); + assertArrayEquals(vectors[i][j], iterator.next(), 0.0001f); + assertEquals(expectedMagnitudes[i][j], magnitudes[j], 0.0001f); + } + } + } + + public void testFloatMetadataAndIterator() throws IOException { + int dims = 3; + float[][][] vectors = new float[][][] { fill(new float[3][dims], ElementType.FLOAT), fill(new float[2][dims], ElementType.FLOAT) }; + float[][] magnitudes = new float[][] { new float[3], new float[2] }; + BinaryDocValues docValues = wrap(vectors, ElementType.FLOAT); + BinaryDocValues magnitudeValues = wrap(magnitudes); + + MultiDenseVectorDocValuesField field = new FloatMultiDenseVectorDocValuesField( + docValues, + magnitudeValues, + "test", + ElementType.FLOAT, + dims + ); + for (int i = 0; i < vectors.length; i++) { + field.setNextDocId(i); + MultiDenseVector dv = field.get(); + assertEquals(vectors[i].length, dv.size()); + assertFalse(dv.isEmpty()); + assertEquals(dims, dv.getDims()); + UnsupportedOperationException e = expectThrows(UnsupportedOperationException.class, field::iterator); + assertEquals("Cannot iterate over single valued multi_dense_vector field, use get() instead", e.getMessage()); + } + field.setNextDocId(vectors.length); + MultiDenseVector dv = field.get(); + assertEquals(dv, MultiDenseVector.EMPTY); + } + + public void testByteMetadataAndIterator() throws IOException { + int dims = 3; + float[][][] vectors = new float[][][] { fill(new float[3][dims], ElementType.BYTE), fill(new float[2][dims], ElementType.BYTE) }; + float[][] magnitudes = new float[][] { new float[3], new float[2] }; + BinaryDocValues docValues = wrap(vectors, ElementType.BYTE); + BinaryDocValues magnitudeValues = wrap(magnitudes); + MultiDenseVectorDocValuesField field = new ByteMultiDenseVectorDocValuesField( + docValues, + magnitudeValues, + "test", + ElementType.BYTE, + dims + ); + for (int i = 0; i < vectors.length; i++) { + field.setNextDocId(i); + MultiDenseVector dv = field.get(); + assertEquals(vectors[i].length, dv.size()); + assertFalse(dv.isEmpty()); + assertEquals(dims, dv.getDims()); + UnsupportedOperationException e = expectThrows(UnsupportedOperationException.class, field::iterator); + assertEquals("Cannot iterate over single valued multi_dense_vector field, use get() instead", e.getMessage()); + } + field.setNextDocId(vectors.length); + MultiDenseVector dv = field.get(); + assertEquals(dv, MultiDenseVector.EMPTY); + } + + protected float[][] fill(float[][] vectors, ElementType elementType) { + for (float[] vector : vectors) { + for (int i = 0; i < vector.length; i++) { + vector[i] = elementType == ElementType.FLOAT ? 
randomFloat() : randomByte(); + } + } + return vectors; + } + + public void testFloatMissingValues() throws IOException { + int dims = 3; + float[][][] vectors = { { { 1, 1, 1 }, { 1, 1, 2 }, { 1, 1, 3 } }, { { 1, 0, 2 } } }; + float[][] magnitudes = { { 1.7320f, 2.4495f, 3.3166f }, { 2.2361f } }; + BinaryDocValues docValues = wrap(vectors, ElementType.FLOAT); + BinaryDocValues magnitudeValues = wrap(magnitudes); + MultiDenseVectorDocValuesField field = new FloatMultiDenseVectorDocValuesField( + docValues, + magnitudeValues, + "test", + ElementType.FLOAT, + dims + ); + MultiDenseVectorScriptDocValues scriptDocValues = field.toScriptDocValues(); + + field.setNextDocId(3); + assertEquals(0, field.size()); + Exception e = expectThrows(IllegalArgumentException.class, scriptDocValues::getVectorValues); + assertEquals("A document doesn't have a value for a multi-vector field!", e.getMessage()); + + e = expectThrows(IllegalArgumentException.class, scriptDocValues::getMagnitudes); + assertEquals("A document doesn't have a value for a multi-vector field!", e.getMessage()); + } + + public void testByteMissingValues() throws IOException { + int dims = 3; + float[][][] vectors = { { { 1, 1, 1 }, { 1, 1, 2 }, { 1, 1, 3 } }, { { 1, 0, 2 } } }; + float[][] magnitudes = { { 1.7320f, 2.4495f, 3.3166f }, { 2.2361f } }; + BinaryDocValues docValues = wrap(vectors, ElementType.BYTE); + BinaryDocValues magnitudeValues = wrap(magnitudes); + MultiDenseVectorDocValuesField field = new ByteMultiDenseVectorDocValuesField( + docValues, + magnitudeValues, + "test", + ElementType.BYTE, + dims + ); + MultiDenseVectorScriptDocValues scriptDocValues = field.toScriptDocValues(); + + field.setNextDocId(3); + assertEquals(0, field.size()); + Exception e = expectThrows(IllegalArgumentException.class, scriptDocValues::getVectorValues); + assertEquals("A document doesn't have a value for a multi-vector field!", e.getMessage()); + + e = expectThrows(IllegalArgumentException.class, scriptDocValues::getMagnitudes); + assertEquals("A document doesn't have a value for a multi-vector field!", e.getMessage()); + } + + public void testFloatGetFunctionIsNotAccessible() throws IOException { + int dims = 3; + float[][][] vectors = { { { 1, 1, 1 }, { 1, 1, 2 }, { 1, 1, 3 } }, { { 1, 0, 2 } } }; + float[][] magnitudes = { { 1.7320f, 2.4495f, 3.3166f }, { 2.2361f } }; + BinaryDocValues docValues = wrap(vectors, ElementType.FLOAT); + BinaryDocValues magnitudeValues = wrap(magnitudes); + MultiDenseVectorDocValuesField field = new FloatMultiDenseVectorDocValuesField( + docValues, + magnitudeValues, + "test", + ElementType.FLOAT, + dims + ); + MultiDenseVectorScriptDocValues scriptDocValues = field.toScriptDocValues(); + + field.setNextDocId(0); + Exception e = expectThrows(UnsupportedOperationException.class, () -> scriptDocValues.get(0)); + assertThat( + e.getMessage(), + containsString( + "accessing a multi-vector field's value through 'get' or 'value' is not supported," + + " use 'vectorValues' or 'magnitudes' instead." 
+ ) + ); + } + + public void testByteGetFunctionIsNotAccessible() throws IOException { + int dims = 3; + float[][][] vectors = { { { 1, 1, 1 }, { 1, 1, 2 }, { 1, 1, 3 } }, { { 1, 0, 2 } } }; + float[][] magnitudes = { { 1.7320f, 2.4495f, 3.3166f }, { 2.2361f } }; + BinaryDocValues docValues = wrap(vectors, ElementType.BYTE); + BinaryDocValues magnitudeValues = wrap(magnitudes); + MultiDenseVectorDocValuesField field = new ByteMultiDenseVectorDocValuesField( + docValues, + magnitudeValues, + "test", + ElementType.BYTE, + dims + ); + MultiDenseVectorScriptDocValues scriptDocValues = field.toScriptDocValues(); + + field.setNextDocId(0); + Exception e = expectThrows(UnsupportedOperationException.class, () -> scriptDocValues.get(0)); + assertThat( + e.getMessage(), + containsString( + "accessing a multi-vector field's value through 'get' or 'value' is not supported," + + " use 'vectorValues' or 'magnitudes' instead." + ) + ); + } + + public static BinaryDocValues wrap(float[][] magnitudes) { + return new BinaryDocValues() { + int idx = -1; + int maxIdx = magnitudes.length; + + @Override + public BytesRef binaryValue() { + if (idx >= maxIdx) { + throw new IllegalStateException("max index exceeded"); + } + ByteBuffer magnitudeBuffer = ByteBuffer.allocate(magnitudes[idx].length * Float.BYTES).order(ByteOrder.LITTLE_ENDIAN); + for (float magnitude : magnitudes[idx]) { + magnitudeBuffer.putFloat(magnitude); + } + return new BytesRef(magnitudeBuffer.array()); + } + + @Override + public boolean advanceExact(int target) { + idx = target; + if (target < maxIdx) { + return true; + } + return false; + } + + @Override + public int docID() { + return idx; + } + + @Override + public int nextDoc() { + return idx++; + } + + @Override + public int advance(int target) { + throw new IllegalArgumentException("not defined!"); + } + + @Override + public long cost() { + throw new IllegalArgumentException("not defined!"); + } + }; + } + + public static BinaryDocValues wrap(float[][][] vectors, ElementType elementType) { + return new BinaryDocValues() { + int idx = -1; + int maxIdx = vectors.length; + + @Override + public BytesRef binaryValue() { + if (idx >= maxIdx) { + throw new IllegalStateException("max index exceeded"); + } + return mockEncodeDenseVector(vectors[idx], elementType, IndexVersion.current()); + } + + @Override + public boolean advanceExact(int target) { + idx = target; + if (target < maxIdx) { + return true; + } + return false; + } + + @Override + public int docID() { + return idx; + } + + @Override + public int nextDoc() { + return idx++; + } + + @Override + public int advance(int target) { + throw new IllegalArgumentException("not defined!"); + } + + @Override + public long cost() { + throw new IllegalArgumentException("not defined!"); + } + }; + } + + public static BytesRef mockEncodeDenseVector(float[][] values, ElementType elementType, IndexVersion indexVersion) { + int dims = values[0].length; + if (elementType == ElementType.BIT) { + dims *= Byte.SIZE; + } + int numBytes = elementType.getNumBytes(dims); + ByteBuffer byteBuffer = elementType.createByteBuffer(indexVersion, numBytes * values.length); + for (float[] vector : values) { + for (float value : vector) { + if (elementType == ElementType.FLOAT) { + byteBuffer.putFloat(value); + } else if (elementType == ElementType.BYTE || elementType == ElementType.BIT) { + byteBuffer.put((byte) value); + } else { + throw new IllegalStateException("unknown element_type [" + elementType + "]"); + } + } + } + return new BytesRef(byteBuffer.array()); + } + 
+} diff --git a/server/src/test/java/org/elasticsearch/repositories/RepositoriesModuleTests.java b/server/src/test/java/org/elasticsearch/repositories/RepositoriesModuleTests.java index aa47f3c066f5..7f6885e7a977 100644 --- a/server/src/test/java/org/elasticsearch/repositories/RepositoriesModuleTests.java +++ b/server/src/test/java/org/elasticsearch/repositories/RepositoriesModuleTests.java @@ -65,14 +65,26 @@ public void setUp() throws Exception { } public void testCanRegisterTwoRepositoriesWithDifferentTypes() { - when(plugin1.getRepositories(eq(environment), eq(contentRegistry), eq(clusterService), - eq(MockBigArrays.NON_RECYCLING_INSTANCE), eq(recoverySettings), - any(RepositoriesMetrics.class))) - .thenReturn(Collections.singletonMap("type1", factory)); - when(plugin2.getRepositories(eq(environment), eq(contentRegistry), eq(clusterService), - eq(MockBigArrays.NON_RECYCLING_INSTANCE), eq(recoverySettings), - any(RepositoriesMetrics.class))) - .thenReturn(Collections.singletonMap("type2", factory)); + when( + plugin1.getRepositories( + eq(environment), + eq(contentRegistry), + eq(clusterService), + eq(MockBigArrays.NON_RECYCLING_INSTANCE), + eq(recoverySettings), + any(RepositoriesMetrics.class) + ) + ).thenReturn(Collections.singletonMap("type1", factory)); + when( + plugin2.getRepositories( + eq(environment), + eq(contentRegistry), + eq(clusterService), + eq(MockBigArrays.NON_RECYCLING_INSTANCE), + eq(recoverySettings), + any(RepositoriesMetrics.class) + ) + ).thenReturn(Collections.singletonMap("type2", factory)); // Would throw new RepositoriesModule( @@ -83,18 +95,32 @@ public void testCanRegisterTwoRepositoriesWithDifferentTypes() { mock(ClusterService.class), MockBigArrays.NON_RECYCLING_INSTANCE, contentRegistry, - recoverySettings, TelemetryProvider.NOOP); + recoverySettings, + TelemetryProvider.NOOP + ); } public void testCannotRegisterTwoRepositoriesWithSameTypes() { - when(plugin1.getRepositories(eq(environment), eq(contentRegistry), eq(clusterService), - eq(MockBigArrays.NON_RECYCLING_INSTANCE), eq(recoverySettings), - any(RepositoriesMetrics.class))) - .thenReturn(Collections.singletonMap("type1", factory)); - when(plugin2.getRepositories(eq(environment), eq(contentRegistry), eq(clusterService), - eq(MockBigArrays.NON_RECYCLING_INSTANCE), eq(recoverySettings), - any(RepositoriesMetrics.class))) - .thenReturn(Collections.singletonMap("type1", factory)); + when( + plugin1.getRepositories( + eq(environment), + eq(contentRegistry), + eq(clusterService), + eq(MockBigArrays.NON_RECYCLING_INSTANCE), + eq(recoverySettings), + any(RepositoriesMetrics.class) + ) + ).thenReturn(Collections.singletonMap("type1", factory)); + when( + plugin2.getRepositories( + eq(environment), + eq(contentRegistry), + eq(clusterService), + eq(MockBigArrays.NON_RECYCLING_INSTANCE), + eq(recoverySettings), + any(RepositoriesMetrics.class) + ) + ).thenReturn(Collections.singletonMap("type1", factory)); IllegalArgumentException ex = expectThrows( IllegalArgumentException.class, @@ -106,7 +132,9 @@ public void testCannotRegisterTwoRepositoriesWithSameTypes() { clusterService, MockBigArrays.NON_RECYCLING_INSTANCE, contentRegistry, - recoverySettings, TelemetryProvider.NOOP) + recoverySettings, + TelemetryProvider.NOOP + ) ); assertEquals("Repository type [type1] is already registered", ex.getMessage()); @@ -130,17 +158,25 @@ public void testCannotRegisterTwoInternalRepositoriesWithSameTypes() { clusterService, MockBigArrays.NON_RECYCLING_INSTANCE, contentRegistry, - recoverySettings, TelemetryProvider.NOOP) + 
recoverySettings, + TelemetryProvider.NOOP + ) ); assertEquals("Internal repository type [type1] is already registered", ex.getMessage()); } public void testCannotRegisterNormalAndInternalRepositoriesWithSameTypes() { - when(plugin1.getRepositories(eq(environment), eq(contentRegistry), eq(clusterService), - eq(MockBigArrays.NON_RECYCLING_INSTANCE), eq(recoverySettings), - any(RepositoriesMetrics.class))) - .thenReturn(Collections.singletonMap("type1", factory)); + when( + plugin1.getRepositories( + eq(environment), + eq(contentRegistry), + eq(clusterService), + eq(MockBigArrays.NON_RECYCLING_INSTANCE), + eq(recoverySettings), + any(RepositoriesMetrics.class) + ) + ).thenReturn(Collections.singletonMap("type1", factory)); when(plugin2.getInternalRepositories(environment, contentRegistry, clusterService, recoverySettings)).thenReturn( Collections.singletonMap("type1", factory) ); @@ -155,7 +191,9 @@ public void testCannotRegisterNormalAndInternalRepositoriesWithSameTypes() { clusterService, MockBigArrays.NON_RECYCLING_INSTANCE, contentRegistry, - recoverySettings, TelemetryProvider.NOOP) + recoverySettings, + TelemetryProvider.NOOP + ) ); assertEquals("Internal repository type [type1] is already registered as a non-internal repository", ex.getMessage()); diff --git a/server/src/test/java/org/elasticsearch/search/fetch/FetchPhaseDocsIteratorTests.java b/server/src/test/java/org/elasticsearch/search/fetch/FetchPhaseDocsIteratorTests.java index c699e117ffbf..d5e930321db9 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/FetchPhaseDocsIteratorTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/FetchPhaseDocsIteratorTests.java @@ -77,7 +77,7 @@ protected SearchHit nextDoc(int doc) { } }; - SearchHit[] hits = it.iterate(null, reader, docs); + SearchHit[] hits = it.iterate(null, reader, docs, randomBoolean()); assertThat(hits.length, equalTo(docs.length)); for (int i = 0; i < hits.length; i++) { @@ -125,7 +125,7 @@ protected SearchHit nextDoc(int doc) { } }; - Exception e = expectThrows(FetchPhaseExecutionException.class, () -> it.iterate(null, reader, docs)); + Exception e = expectThrows(FetchPhaseExecutionException.class, () -> it.iterate(null, reader, docs, randomBoolean())); assertThat(e.getMessage(), containsString("Error running fetch phase for doc [" + badDoc + "]")); assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java index 3699cdee3912..d1bbc1ec5910 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java @@ -607,17 +607,6 @@ public void testOrderSerialization() throws Exception { } } - public void testForceSourceDeprecation() throws IOException { - String highlightJson = """ - { "fields" : { }, "force_source" : true } - """; - try (XContentParser parser = createParser(JsonXContent.jsonXContent, highlightJson)) { - HighlightBuilder.fromXContent(parser); - } - - assertWarnings("Deprecated field [force_source] used, this field is unused and will be removed entirely"); - } - protected static XContentBuilder toXContent(HighlightBuilder highlight, XContentType contentType) throws IOException { XContentBuilder builder = XContentFactory.contentBuilder(contentType); if (randomBoolean()) 
{ diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index 077877f71357..cf240550e809 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -146,7 +146,6 @@ import org.elasticsearch.index.seqno.RetentionLeaseSyncer; import org.elasticsearch.index.shard.PrimaryReplicaSyncer; import org.elasticsearch.indices.EmptySystemIndices; -import org.elasticsearch.indices.IndicesFeatures; import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.IndicesServiceBuilder; @@ -2245,7 +2244,7 @@ public RecyclerBytesStreamOutput newNetworkBytesStream() { .scriptService(scriptService) .clusterService(clusterService) .client(client) - .featureService(new FeatureService(List.of(new IndicesFeatures()))) + .featureService(new FeatureService(List.of())) .metaStateService(new MetaStateService(nodeEnv, namedXContentRegistry)) .mapperMetrics(MapperMetrics.NOOP) .build(); diff --git a/settings.gradle b/settings.gradle index 54a9514490db..d04d45bffc3a 100644 --- a/settings.gradle +++ b/settings.gradle @@ -4,9 +4,6 @@ import org.elasticsearch.gradle.internal.toolchain.AdoptiumJdkToolchainResolver pluginManagement { repositories { - maven { - url 'https://jitpack.io' - } mavenCentral() gradlePluginPortal() } diff --git a/test/external-modules/apm-integration/build.gradle b/test/external-modules/apm-integration/build.gradle index d0f5f889e9b3..91e01d363749 100644 --- a/test/external-modules/apm-integration/build.gradle +++ b/test/external-modules/apm-integration/build.gradle @@ -17,7 +17,7 @@ tasks.named("test").configure { } tasks.named('javaRestTest').configure { - it.onlyIf("snapshot build") { BuildParams.isSnapshotBuild() } + it.onlyIf("snapshot build") { buildParams.isSnapshotBuild() } } dependencies { diff --git a/test/external-modules/delayed-aggs/build.gradle b/test/external-modules/delayed-aggs/build.gradle index c17fa64f4d12..f57bd37d6517 100644 --- a/test/external-modules/delayed-aggs/build.gradle +++ b/test/external-modules/delayed-aggs/build.gradle @@ -11,7 +11,7 @@ import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.legacy-yaml-rest-test' tasks.named('yamlRestTest').configure { - it.onlyIf("snapshot build") { BuildParams.isSnapshotBuild() } + it.onlyIf("snapshot build") { buildParams.isSnapshotBuild() } } esplugin { diff --git a/test/external-modules/die-with-dignity/build.gradle b/test/external-modules/die-with-dignity/build.gradle index 34a9a71533d3..1f98e4305258 100644 --- a/test/external-modules/die-with-dignity/build.gradle +++ b/test/external-modules/die-with-dignity/build.gradle @@ -17,5 +17,5 @@ tasks.named("test").configure { } tasks.named('javaRestTest').configure { - it.onlyIf("snapshot build") { BuildParams.isSnapshotBuild() } + it.onlyIf("snapshot build") { buildParams.isSnapshotBuild() } } diff --git a/test/external-modules/error-query/build.gradle b/test/external-modules/error-query/build.gradle index de9101364ca0..ff4783552ebf 100644 --- a/test/external-modules/error-query/build.gradle +++ b/test/external-modules/error-query/build.gradle @@ -11,7 +11,7 @@ import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.legacy-yaml-rest-test' tasks.named('yamlRestTest').configure { - it.onlyIf("snapshot build") { 
BuildParams.isSnapshotBuild() } + it.onlyIf("snapshot build") { buildParams.isSnapshotBuild() } } esplugin { diff --git a/test/external-modules/esql-heap-attack/build.gradle b/test/external-modules/esql-heap-attack/build.gradle index 2276766d52df..3d6291f6d011 100644 --- a/test/external-modules/esql-heap-attack/build.gradle +++ b/test/external-modules/esql-heap-attack/build.gradle @@ -21,5 +21,5 @@ esplugin { tasks.named('javaRestTest') { usesDefaultDistribution() - it.onlyIf("snapshot build") { BuildParams.isSnapshotBuild() } + it.onlyIf("snapshot build") { buildParams.isSnapshotBuild() } } diff --git a/test/external-modules/jvm-crash/build.gradle b/test/external-modules/jvm-crash/build.gradle index c1344d570a33..73ad8b851a22 100644 --- a/test/external-modules/jvm-crash/build.gradle +++ b/test/external-modules/jvm-crash/build.gradle @@ -21,5 +21,5 @@ esplugin { tasks.named('javaRestTest') { usesDefaultDistribution() - it.onlyIf("snapshot build") { BuildParams.isSnapshotBuild() } + it.onlyIf("snapshot build") { buildParams.isSnapshotBuild() } } diff --git a/test/fixtures/hdfs-fixture/build.gradle b/test/fixtures/hdfs-fixture/build.gradle index 8911bdf286a0..9dc0263f49ae 100644 --- a/test/fixtures/hdfs-fixture/build.gradle +++ b/test/fixtures/hdfs-fixture/build.gradle @@ -8,7 +8,7 @@ */ apply plugin: 'elasticsearch.java' -apply plugin: 'com.github.johnrengelman.shadow' +apply plugin: 'com.gradleup.shadow' import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar diff --git a/test/framework/build.gradle b/test/framework/build.gradle index f130ecf13184..c61a3b1851ea 100644 --- a/test/framework/build.gradle +++ b/test/framework/build.gradle @@ -85,9 +85,9 @@ tasks.named("thirdPartyAudit").configure { } tasks.named("test").configure { - systemProperty 'tests.gradle_index_compat_versions', BuildParams.bwcVersions.indexCompatible.join(',') - systemProperty 'tests.gradle_wire_compat_versions', BuildParams.bwcVersions.wireCompatible.join(',') - systemProperty 'tests.gradle_unreleased_versions', BuildParams.bwcVersions.unreleased.join(',') + systemProperty 'tests.gradle_index_compat_versions', buildParams.bwcVersions.indexCompatible.join(',') + systemProperty 'tests.gradle_wire_compat_versions', buildParams.bwcVersions.wireCompatible.join(',') + systemProperty 'tests.gradle_unreleased_versions', buildParams.bwcVersions.unreleased.join(',') } tasks.register("integTest", Test) { diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index b50fd4e96044..51f66418bb44 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -53,6 +53,8 @@ import org.apache.lucene.util.packed.PackedInts; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; +import org.elasticsearch.action.OriginalIndices; +import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.CheckedBiConsumer; import org.elasticsearch.common.TriConsumer; @@ -143,8 +145,10 @@ import org.elasticsearch.search.fetch.FetchPhase; import org.elasticsearch.search.fetch.subphase.FetchDocValuesPhase; import org.elasticsearch.search.fetch.subphase.FetchSourcePhase; +import org.elasticsearch.search.internal.AliasFilter; import 
org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.search.internal.SubSearchContext; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.InternalAggregationTestCase; @@ -466,7 +470,18 @@ private SubSearchContext buildSubSearchContext( .when(subContext) .getNestedDocuments(); when(ctx.getSearchExecutionContext()).thenReturn(subContext); - + ShardSearchRequest request = new ShardSearchRequest( + OriginalIndices.NONE, + new SearchRequest().allowPartialSearchResults(randomBoolean()), + new ShardId("index", "indexUUID", 0), + 0, + 1, + AliasFilter.EMPTY, + 1f, + 0L, + null + ); + when(ctx.request()).thenReturn(request); IndexShard indexShard = mock(IndexShard.class); when(indexShard.shardId()).thenReturn(new ShardId("test", "test", 0)); when(indexShard.indexSettings()).thenReturn(indexSettings); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java index e43aa940a488..a10394b4156d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java @@ -17,7 +17,6 @@ import java.util.Map; import static java.util.Map.entry; -import static org.elasticsearch.cluster.ClusterState.VERSION_INTRODUCING_TRANSPORT_VERSIONS; /** * This class groups historical features that have been removed from the production codebase, but are still used by the test @@ -30,15 +29,6 @@ public class RestTestLegacyFeatures implements FeatureSpecification { public static final NodeFeature COMPONENT_TEMPLATE_SUPPORTED = new NodeFeature("indices.component_template_supported"); public static final NodeFeature ML_NEW_MEMORY_FORMAT = new NodeFeature("ml.new_memory_format"); - /** These are "pure test" features: normally we would not need them, and test for TransportVersion/fallback to Version (see for example - * {@code ESRestTestCase#minimumTransportVersion()}. However, some tests explicitly check and validate the content of a response, so - * we need these features to support them. 
- */ - - public static final NodeFeature TRANSPORT_VERSION_SUPPORTED = new NodeFeature("transport_version_supported"); - public static final NodeFeature STATE_REPLACED_TRANSPORT_VERSION_WITH_NODES_VERSION = new NodeFeature( - "state.transport_version_to_nodes_version" - ); - - // Ref: https://github.com/elastic/elasticsearch/pull/86416 public static final NodeFeature ML_MEMORY_OVERHEAD_FIXED = new NodeFeature("ml.memory_overhead_fixed"); @@ -103,8 +93,6 @@ public Map<NodeFeature, Version> getHistoricalFeatures() { entry(SECURITY_UPDATE_API_KEY, Version.V_8_4_0), entry(SECURITY_BULK_UPDATE_API_KEY, Version.V_8_5_0), entry(ML_NEW_MEMORY_FORMAT, Version.V_8_11_0), - entry(TRANSPORT_VERSION_SUPPORTED, VERSION_INTRODUCING_TRANSPORT_VERSIONS), - entry(STATE_REPLACED_TRANSPORT_VERSION_WITH_NODES_VERSION, Version.V_8_11_0), entry(ML_MEMORY_OVERHEAD_FIXED, Version.V_8_2_1), entry(REST_ELASTIC_PRODUCT_HEADER_PRESENT, Version.V_8_0_1), entry(DESIRED_NODE_API_SUPPORTED, Version.V_8_1_0), diff --git a/test/framework/src/main/resources/org/elasticsearch/analysis/common/test1.json b/test/framework/src/main/resources/org/elasticsearch/analysis/common/test1.json index e69c2db6ff40..58e0ea5fd9fc 100644 --- a/test/framework/src/main/resources/org/elasticsearch/analysis/common/test1.json +++ b/test/framework/src/main/resources/org/elasticsearch/analysis/common/test1.json @@ -21,7 +21,52 @@ "dict_dec":{ "type":"dictionary_decompounder", "word_list":["donau", "dampf", "schiff", "spargel", "creme", "suppe"] - } + }, + "hyphenation_dec_only_longest_match": { + "type": "hyphenation_decompounder", + "hyphenation_patterns_path": "de_DR.xml", + "word_list": [ + "fuss", + "fussball", + "ballpumpe", + "ball", + "pumpe", + "kaffee", + "fee", + "maschine" + ], + "only_longest_match": true + }, + "hyphenation_dec_no_sub_matches": { + "type": "hyphenation_decompounder", + "hyphenation_patterns_path": "de_DR.xml", + "word_list": [ + "fuss", + "fussball", + "ballpumpe", + "ball", + "pumpe", + "kaffee", + "fee", + "maschine" + ], + "no_sub_matches": true + }, + "hyphenation_dec_no_overlapping_matches": { + "type": "hyphenation_decompounder", + "hyphenation_patterns_path": "de_DR.xml", + "word_list": [ + "fuss", + "fussball", + "ballpumpe", + "ball", + "pumpe", + "kaffee", + "fee", + "maschine" + ], + "no_overlapping_matches": true + } }, "analyzer":{ "standard":{ @@ -47,6 +92,18 @@ "decompoundingAnalyzer":{ "tokenizer":"standard", "filter":["dict_dec"] + }, + "hyphenationDecompoundingAnalyzerOnlyLongestMatch":{ + "tokenizer":"standard", + "filter":["hyphenation_dec_only_longest_match"] + }, + "hyphenationDecompoundingAnalyzerNoSubMatches": { + "tokenizer":"standard", + "filter":["hyphenation_dec_no_sub_matches"] + }, + "hyphenationDecompoundingAnalyzerNoOverlappingMatches":{ + "tokenizer":"standard", + "filter":["hyphenation_dec_no_overlapping_matches"] + } } } diff --git a/test/framework/src/main/resources/org/elasticsearch/analysis/common/test1.yml b/test/framework/src/main/resources/org/elasticsearch/analysis/common/test1.yml index 82f933296a31..095b27e0fa07 100644 --- a/test/framework/src/main/resources/org/elasticsearch/analysis/common/test1.yml +++ b/test/framework/src/main/resources/org/elasticsearch/analysis/common/test1.yml @@ -15,6 +15,21 @@ index : dict_dec : type : dictionary_decompounder word_list : [donau, dampf, schiff, spargel, creme, suppe] + hyphenation_dec_only_longest_match : + type : hyphenation_decompounder + hyphenation_patterns_path : de_DR.xml + word_list : [fuss, fussball, ballpumpe, ball, pumpe, kaffee, fee, maschine] +
only_longest_match : true + hyphenation_dec_no_sub_matches : + type : hyphenation_decompounder + hyphenation_patterns_path : de_DR.xml + word_list : [fuss, fussball, ballpumpe, ball, pumpe, kaffee, fee, maschine] + no_sub_matches : true + hyphenation_dec_no_overlapping_matches : + type : hyphenation_decompounder + hyphenation_patterns_path : de_DR.xml + word_list : [fuss, fussball, ballpumpe, ball, pumpe, kaffee, fee, maschine] + no_overlapping_matches: true analyzer : standard : type : standard @@ -37,3 +52,13 @@ index : decompoundingAnalyzer : tokenizer : standard filter : [dict_dec] + hyphenationDecompoundingAnalyzerOnlyLongestMatch : + tokenizer : standard + filter : [hyphenation_dec_only_longest_match] + hyphenationDecompoundingAnalyzerNoSubMatches: + tokenizer: standard + filter : [hyphenation_dec_no_sub_matches] + hyphenationDecompoundingAnalyzerNoOverlappingMatches: + tokenizer: standard + filter : [hyphenation_dec_no_overlapping_matches] + diff --git a/test/immutable-collections-patch/build.gradle b/test/immutable-collections-patch/build.gradle index 28aad9675462..381c0cd6dd04 100644 --- a/test/immutable-collections-patch/build.gradle +++ b/test/immutable-collections-patch/build.gradle @@ -26,14 +26,14 @@ def outputDir = layout.buildDirectory.dir("jdk-patches") def generatePatch = tasks.register("generatePatch", JavaExec) generatePatch.configure { dependsOn tasks.named("compileJava") - inputs.property("java-home-set", BuildParams.getIsRuntimeJavaHomeSet()) - inputs.property("java-version", BuildParams.runtimeJavaVersion) + inputs.property("java-home-set", buildParams.getIsRuntimeJavaHomeSet()) + inputs.property("java-version", buildParams.runtimeJavaVersion) outputs.dir(outputDir) classpath = sourceSets.main.runtimeClasspath mainClass = 'org.elasticsearch.jdk.patch.ImmutableCollectionsPatcher' - if (BuildParams.getIsRuntimeJavaHomeSet()) { - executable = "${BuildParams.runtimeJavaHome}/bin/java" + (OS.current() == OS.WINDOWS ? '.exe' : '') + if (buildParams.getIsRuntimeJavaHomeSet()) { + executable = "${buildParams.runtimeJavaHome.get()}/bin/java" + (OS.current() == OS.WINDOWS ? 
'.exe' : '') } else { javaLauncher = javaToolchains.launcherFor { languageVersion = JavaLanguageVersion.of(VersionProperties.bundledJdkMajorVersion) diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java index f996db92e57f..11787866af0d 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java @@ -18,9 +18,7 @@ public enum FeatureFlag { TIME_SERIES_MODE("es.index_mode_feature_flag_registered=true", Version.fromString("8.0.0"), null), FAILURE_STORE_ENABLED("es.failure_store_feature_flag_enabled=true", Version.fromString("8.12.0"), null), - SUB_OBJECTS_AUTO_ENABLED("es.sub_objects_auto_feature_flag_enabled=true", Version.fromString("8.16.0"), null), - INFERENCE_DEFAULT_ELSER("es.inference_default_elser_feature_flag_enabled=true", Version.fromString("8.16.0"), null), - ML_SCALE_FROM_ZERO("es.ml_scale_from_zero_feature_flag_enabled=true", Version.fromString("8.16.0"), null); + SUB_OBJECTS_AUTO_ENABLED("es.sub_objects_auto_feature_flag_enabled=true", Version.fromString("8.16.0"), null); public final String systemProperty; public final Version from; diff --git a/x-pack/libs/es-opensaml-security-api/build.gradle b/x-pack/libs/es-opensaml-security-api/build.gradle index b36d0bfa7b37..3b4434ec5d9e 100644 --- a/x-pack/libs/es-opensaml-security-api/build.gradle +++ b/x-pack/libs/es-opensaml-security-api/build.gradle @@ -7,7 +7,7 @@ apply plugin: 'elasticsearch.build' apply plugin: 'elasticsearch.publish' -apply plugin: 'com.github.johnrengelman.shadow' +apply plugin: 'com.gradleup.shadow' dependencies { implementation "org.opensaml:opensaml-security-api:${versions.opensaml}" diff --git a/x-pack/plugin/analytics/build.gradle b/x-pack/plugin/analytics/build.gradle index c451df58b9fa..ddc075cc9adc 100644 --- a/x-pack/plugin/analytics/build.gradle +++ b/x-pack/plugin/analytics/build.gradle @@ -21,7 +21,7 @@ dependencies { testImplementation(testArtifact(project(xpackModule('core')))) } -if (BuildParams.isSnapshotBuild() == false) { +if (buildParams.isSnapshotBuild() == false) { tasks.named("test").configure { systemProperty 'es.index_mode_feature_flag_registered', 'true' } diff --git a/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistry.java b/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistry.java index 466c9e4f006d..228ac401b96b 100644 --- a/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistry.java +++ b/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistry.java @@ -10,7 +10,6 @@ import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.ClientHelper; @@ -33,8 +32,7 @@ public APMIndexTemplateRegistry( ClusterService clusterService, ThreadPool threadPool, Client client, - NamedXContentRegistry xContentRegistry, - FeatureService featureService + NamedXContentRegistry xContentRegistry ) { super( nodeSettings, @@ -42,7 +40,6 @@ public APMIndexTemplateRegistry( threadPool, client, xContentRegistry, - featureService, 
templateFilter(isDataStreamsLifecycleOnlyMode(clusterService.getSettings())) ); } diff --git a/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMPlugin.java b/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMPlugin.java index aefb45f6186c..0be95c337838 100644 --- a/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMPlugin.java +++ b/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMPlugin.java @@ -48,14 +48,7 @@ public Collection createComponents(PluginServices services) { Settings settings = services.environment().settings(); ClusterService clusterService = services.clusterService(); registry.set( - new APMIndexTemplateRegistry( - settings, - clusterService, - services.threadPool(), - services.client(), - services.xContentRegistry(), - services.featureService() - ) + new APMIndexTemplateRegistry(settings, clusterService, services.threadPool(), services.client(), services.xContentRegistry()) ); if (enabled) { APMIndexTemplateRegistry registryInstance = registry.get(); diff --git a/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@mappings.yaml b/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@mappings.yaml index ac6462c86676..a5a3a7433f4c 100644 --- a/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@mappings.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@mappings.yaml @@ -4,6 +4,7 @@ _meta: managed: true template: mappings: + date_detection: false dynamic: true dynamic_templates: - numeric_labels: diff --git a/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMDSLOnlyTests.java b/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMDSLOnlyTests.java index 476b504339e6..b18e95b55dde 100644 --- a/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMDSLOnlyTests.java +++ b/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMDSLOnlyTests.java @@ -14,8 +14,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.datastreams.DataStreamFeatures; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; @@ -24,7 +22,6 @@ import org.junit.After; import org.junit.Before; -import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; @@ -56,15 +53,13 @@ public void createRegistryAndClient() { additionalSettings, clusterSettings ); - FeatureService featureService = new FeatureService(List.of(new DataStreamFeatures())); apmIndexTemplateRegistry = new APMIndexTemplateRegistry( Settings.EMPTY, clusterService, threadPool, client, - NamedXContentRegistry.EMPTY, - featureService + NamedXContentRegistry.EMPTY ); apmIndexTemplateRegistry.setEnabled(true); } diff --git a/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistryTests.java b/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistryTests.java index 4a2b9265b3b0..32e7c2225e19 100644 --- a/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistryTests.java +++ b/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistryTests.java @@ -7,7 +7,6 @@ 
package org.elasticsearch.xpack.apmdata; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; @@ -30,8 +29,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.datastreams.DataStreamFeatures; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.ingest.IngestMetadata; import org.elasticsearch.ingest.PipelineConfiguration; import org.elasticsearch.test.ClusterServiceUtils; @@ -92,9 +89,8 @@ public void createRegistryAndClient() { threadPool = new TestThreadPool(this.getClass().getName()); client = new VerifyingClient(threadPool); ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool, clusterSettings); - FeatureService featureService = new FeatureService(List.of(new DataStreamFeatures())); stackTemplateRegistryAccessor = new StackTemplateRegistryAccessor( - new StackTemplateRegistry(Settings.EMPTY, clusterService, threadPool, client, NamedXContentRegistry.EMPTY, featureService) + new StackTemplateRegistry(Settings.EMPTY, clusterService, threadPool, client, NamedXContentRegistry.EMPTY) ); apmIndexTemplateRegistry = new APMIndexTemplateRegistry( @@ -102,8 +98,7 @@ public void createRegistryAndClient() { clusterService, threadPool, client, - NamedXContentRegistry.EMPTY, - featureService + NamedXContentRegistry.EMPTY ); apmIndexTemplateRegistry.setEnabled(true); } @@ -408,25 +403,6 @@ public void testIndexTemplateConventions() throws Exception { } } - public void testThatNothingIsInstalledWhenAllNodesAreNotUpdated() { - DiscoveryNode updatedNode = DiscoveryNodeUtils.create("updatedNode"); - DiscoveryNode outdatedNode = DiscoveryNodeUtils.create("outdatedNode", ESTestCase.buildNewFakeTransportAddress(), Version.V_8_10_0); - DiscoveryNodes nodes = DiscoveryNodes.builder() - .localNodeId("updatedNode") - .masterNodeId("updatedNode") - .add(updatedNode) - .add(outdatedNode) - .build(); - - client.setVerifier((a, r, l) -> { - fail("if some cluster mode are not updated to at least v.8.11.0 nothing should happen"); - return null; - }); - - ClusterChangedEvent event = createClusterChangedEvent(Map.of(), Map.of(), nodes); - apmIndexTemplateRegistry.clusterChanged(event); - } - public void testILMComponentTemplatesInstalled() throws Exception { int ilmFallbackCount = 0; for (Map.Entry entry : apmIndexTemplateRegistry.getComponentTemplateConfigs().entrySet()) { diff --git a/x-pack/plugin/async-search/qa/rest/build.gradle b/x-pack/plugin/async-search/qa/rest/build.gradle index 4fc557a5b604..c95064693077 100644 --- a/x-pack/plugin/async-search/qa/rest/build.gradle +++ b/x-pack/plugin/async-search/qa/rest/build.gradle @@ -28,5 +28,5 @@ testClusters.configureEach { // Test clusters run with security disabled tasks.named("yamlRestTest") { - BuildParams.withFipsEnabledOnly(it) + buildParams.withFipsEnabledOnly(it) } diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle index 193a82436f26..e25d7fb359ac 100644 --- a/x-pack/plugin/build.gradle +++ b/x-pack/plugin/build.gradle @@ -37,7 +37,7 @@ artifacts { def restTestBlacklist = [] // TODO: fix this rest test to not depend on a hardcoded port! 
restTestBlacklist.addAll(['getting_started/10_monitor_cluster_health/*']) -if (BuildParams.isSnapshotBuild() == false) { +if (buildParams.isSnapshotBuild() == false) { // these tests attempt to install basic/internal licenses signed against the dev/public.key // Since there is no infrastructure in place (anytime soon) to generate licenses using the production // private key, these tests are blacklisted in non-snapshot test runs diff --git a/x-pack/plugin/ccr/qa/build.gradle b/x-pack/plugin/ccr/qa/build.gradle index 583ad5d8c3df..4be504e61692 100644 --- a/x-pack/plugin/ccr/qa/build.gradle +++ b/x-pack/plugin/ccr/qa/build.gradle @@ -10,6 +10,6 @@ subprojects { tasks.withType(Test).configureEach { // These fail in CI but only when run as part of checkPart2 and not individually. // Tracked in : https://github.com/elastic/elasticsearch/issues/66661 - BuildParams.withFipsEnabledOnly(it) + buildParams.withFipsEnabledOnly(it) } } diff --git a/x-pack/plugin/core/build.gradle b/x-pack/plugin/core/build.gradle index fb4acb0055a8..b4f17cb436df 100644 --- a/x-pack/plugin/core/build.gradle +++ b/x-pack/plugin/core/build.gradle @@ -94,7 +94,7 @@ tasks.named("processResources").configure { String licenseKey = providers.systemProperty("license.key").getOrNull() if (licenseKey != null) { println "Using provided license key from ${licenseKey}" - } else if (BuildParams.isSnapshotBuild()) { + } else if (buildParams.isSnapshotBuild()) { licenseKey = Paths.get(project.projectDir.path, 'snapshot.key') } else { throw new IllegalArgumentException('Property license.key must be set for release build') @@ -155,13 +155,13 @@ testClusters.configureEach { requiresFeature 'es.failure_store_feature_flag_enabled', Version.fromString("8.15.0") } -if (BuildParams.isSnapshotBuild() == false) { +if (buildParams.isSnapshotBuild() == false) { tasks.withType(Test).configureEach { systemProperty 'es.failure_store_feature_flag_enabled', 'true' } } -if (BuildParams.inFipsJvm) { +if (buildParams.inFipsJvm) { // Test clusters run with security disabled tasks.named("javaRestTest").configure { enabled = false } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/HealthApiUsageTransportAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/HealthApiUsageTransportAction.java index 06393dfa3bad..155ea0ffdcbc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/HealthApiUsageTransportAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/HealthApiUsageTransportAction.java @@ -13,8 +13,6 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.features.FeatureService; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.health.stats.HealthApiStatsAction; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.protocol.xpack.XPackUsageRequest; @@ -30,10 +28,7 @@ */ public class HealthApiUsageTransportAction extends XPackUsageFeatureTransportAction { - static final NodeFeature SUPPORTS_HEALTH_STATS = new NodeFeature("health.supports_health_stats"); - private final Client client; - private final FeatureService featureService; @Inject public HealthApiUsageTransportAction( @@ -42,8 +37,7 @@ public HealthApiUsageTransportAction( ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - Client client, - FeatureService 
featureService + Client client ) { super( XPackUsageFeatureAction.HEALTH.name(), @@ -54,7 +48,6 @@ public HealthApiUsageTransportAction( indexNameExpressionResolver ); this.client = client; - this.featureService = featureService; } @Override @@ -70,7 +63,7 @@ protected void masterOperation( client.threadPool().getThreadContext() ); - if (state.clusterRecovered() && featureService.clusterHasFeature(state, SUPPORTS_HEALTH_STATS)) { + if (state.clusterRecovered()) { HealthApiStatsAction.Request statsRequest = new HealthApiStatsAction.Request(); statsRequest.setParentTask(clusterService.localNode().getId(), task.getId()); client.execute(HealthApiStatsAction.INSTANCE, statsRequest, preservingListener.delegateFailureAndWrap((l, r) -> { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackFeatures.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackFeatures.java index b885a90c30e5..f966bf97f476 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackFeatures.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackFeatures.java @@ -7,13 +7,11 @@ package org.elasticsearch.xpack.core; -import org.elasticsearch.Version; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.license.License; import org.elasticsearch.xpack.core.datatiers.NodesDataTiersUsageTransportAction; -import java.util.Map; import java.util.Set; /** @@ -32,9 +30,4 @@ public Set getFeatures() { LOGSDB_TELMETRY_STATS ); } - - @Override - public Map getHistoricalFeatures() { - return Map.of(HealthApiUsageTransportAction.SUPPORTS_HEALTH_STATS, Version.V_8_7_0); - } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java index 2380c13e147d..fc14ec681101 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java @@ -402,67 +402,6 @@ private static Map initializeReservedRoles() { "Grants access necessary for the APM system user to send system-level data (such as monitoring) to Elasticsearch.\n" ) ), - entry( - "apm_user", - new RoleDescriptor( - "apm_user", - null, - new RoleDescriptor.IndicesPrivileges[] { - // Self managed APM Server - // Can be removed in 8.0 - RoleDescriptor.IndicesPrivileges.builder().indices("apm-*").privileges("read", "view_index_metadata").build(), - - // APM Server under fleet (data streams) - RoleDescriptor.IndicesPrivileges.builder().indices("logs-apm.*").privileges("read", "view_index_metadata").build(), - RoleDescriptor.IndicesPrivileges.builder().indices("logs-apm-*").privileges("read", "view_index_metadata").build(), - RoleDescriptor.IndicesPrivileges.builder() - .indices("metrics-apm.*") - .privileges("read", "view_index_metadata") - .build(), - RoleDescriptor.IndicesPrivileges.builder() - .indices("metrics-apm-*") - .privileges("read", "view_index_metadata") - .build(), - RoleDescriptor.IndicesPrivileges.builder() - .indices("traces-apm.*") - .privileges("read", "view_index_metadata") - .build(), - RoleDescriptor.IndicesPrivileges.builder() - .indices("traces-apm-*") - .privileges("read", "view_index_metadata") - .build(), - - // Machine Learning indices. 
Only needed for legacy reasons - // Can be removed in 8.0 - RoleDescriptor.IndicesPrivileges.builder() - .indices(".ml-anomalies*") - .privileges("read", "view_index_metadata") - .build(), - - // Annotations - RoleDescriptor.IndicesPrivileges.builder() - .indices("observability-annotations") - .privileges("read", "view_index_metadata") - .build() }, - new RoleDescriptor.ApplicationResourcePrivileges[] { - RoleDescriptor.ApplicationResourcePrivileges.builder() - .application("kibana-*") - .resources("*") - .privileges("reserved_ml_apm_user") - .build() }, - null, - null, - MetadataUtils.getDeprecatedReservedMetadata( - "This role will be removed in a future major release. Please use editor and viewer roles instead" - ), - null, - null, - null, - null, - "Grants the privileges required for APM users (such as read and view_index_metadata privileges " - + "on the apm-* and .ml-anomalies* indices)." - ) - ), entry( "inference_admin", new RoleDescriptor( diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/YamlTemplateRegistry.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/YamlTemplateRegistry.java index a30236b2fef2..cf0a73963f86 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/YamlTemplateRegistry.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/YamlTemplateRegistry.java @@ -11,15 +11,12 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.client.internal.Client; -import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.metadata.ComponentTemplate; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Nullable; -import org.elasticsearch.features.FeatureService; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentParserConfiguration; @@ -44,16 +41,12 @@ */ public abstract class YamlTemplateRegistry extends IndexTemplateRegistry { private static final Logger logger = LogManager.getLogger(YamlTemplateRegistry.class); - // this node feature is a redefinition of {@link DataStreamFeatures#DATA_STREAM_LIFECYCLE} and it's meant to avoid adding a - // dependency to the data-streams module just for this - public static final NodeFeature DATA_STREAM_LIFECYCLE = new NodeFeature("data_stream.lifecycle"); private final int version; private final Map componentTemplates; private final Map composableIndexTemplates; private final List ingestPipelines; private final List lifecyclePolicies; - private final FeatureService featureService; private volatile boolean enabled; public YamlTemplateRegistry( @@ -61,10 +54,9 @@ public YamlTemplateRegistry( ClusterService clusterService, ThreadPool threadPool, Client client, - NamedXContentRegistry xContentRegistry, - FeatureService featureService + NamedXContentRegistry xContentRegistry ) { - this(nodeSettings, clusterService, threadPool, client, xContentRegistry, featureService, ignored -> true); + this(nodeSettings, clusterService, threadPool, client, xContentRegistry, ignored -> true); } @SuppressWarnings({ "unchecked", "this-escape" }) @@ -74,7 +66,6 @@ public YamlTemplateRegistry( ThreadPool threadPool, 
Client client, NamedXContentRegistry xContentRegistry, - FeatureService featureService, Predicate templateFilter ) { super(nodeSettings, clusterService, threadPool, client, xContentRegistry); @@ -123,7 +114,6 @@ public YamlTemplateRegistry( .filter(templateFilter) .map(this::loadLifecyclePolicy) .collect(Collectors.toList()); - this.featureService = featureService; } catch (IOException e) { throw new ElasticsearchException(e); } @@ -152,13 +142,6 @@ public void close() { clusterService.removeListener(this); } - @Override - protected boolean isClusterReady(ClusterChangedEvent event) { - // Ensure current version of the components are installed only after versions that support data stream lifecycle - // due to the use of the feature in all the `@lifecycle` component templates - return featureService.clusterHasFeature(event.state(), DATA_STREAM_LIFECYCLE); - } - @Override protected boolean requiresMasterNode() { return true; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java index fb4d822b7655..9818a890d465 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java @@ -3058,89 +3058,6 @@ public void testAPMSystemRole() { assertNoAccessAllowed(APMSystemRole, XPackPlugin.ASYNC_RESULTS_INDEX + randomAlphaOfLengthBetween(0, 2)); } - public void testAPMUserRole() { - final TransportRequest request = mock(TransportRequest.class); - final Authentication authentication = AuthenticationTestHelper.builder().build(); - - final RoleDescriptor roleDescriptor = ReservedRolesStore.roleDescriptor("apm_user"); - assertNotNull(roleDescriptor); - assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); - - final String allowedApplicationActionPattern = "example/custom/action/*"; - final String kibanaApplicationWithRandomIndex = "kibana-" + randomFrom(randomAlphaOfLengthBetween(8, 24), ".kibana"); - Role role = Role.buildFromRoleDescriptor( - roleDescriptor, - new FieldPermissionsCache(Settings.EMPTY), - RESTRICTED_INDICES, - List.of( - new ApplicationPrivilegeDescriptor( - kibanaApplicationWithRandomIndex, - "reserved_ml_apm_user", - Set.of(allowedApplicationActionPattern), - Map.of() - ) - ) - ); - - assertThat(role.cluster().check(DelegatePkiAuthenticationAction.NAME, request, authentication), is(false)); - assertThat(role.runAs().check(randomAlphaOfLengthBetween(1, 12)), is(false)); - - assertNoAccessAllowed(role, "foo"); - assertNoAccessAllowed(role, "foo-apm"); - assertNoAccessAllowed(role, "foo-logs-apm.bar"); - assertNoAccessAllowed(role, "foo-logs-apm-bar"); - assertNoAccessAllowed(role, "foo-traces-apm.bar"); - assertNoAccessAllowed(role, "foo-traces-apm-bar"); - assertNoAccessAllowed(role, "foo-metrics-apm.bar"); - assertNoAccessAllowed(role, "foo-metrics-apm-bar"); - - assertOnlyReadAllowed(role, "logs-apm." + randomIntBetween(0, 5)); - assertOnlyReadAllowed(role, "logs-apm-" + randomIntBetween(0, 5)); - assertOnlyReadAllowed(role, "traces-apm." + randomIntBetween(0, 5)); - assertOnlyReadAllowed(role, "traces-apm-" + randomIntBetween(0, 5)); - assertOnlyReadAllowed(role, "metrics-apm." 
+ randomIntBetween(0, 5)); - assertOnlyReadAllowed(role, "metrics-apm-" + randomIntBetween(0, 5)); - assertOnlyReadAllowed(role, "apm-" + randomIntBetween(0, 5)); - assertOnlyReadAllowed(role, AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT); - - assertOnlyReadAllowed(role, "observability-annotations"); - - assertThat( - role.application().grants(ApplicationPrivilegeTests.createPrivilege(kibanaApplicationWithRandomIndex, "app-foo", "foo"), "*"), - is(false) - ); - assertThat( - role.application() - .grants( - ApplicationPrivilegeTests.createPrivilege( - kibanaApplicationWithRandomIndex, - "app-reserved_ml_apm_user", - allowedApplicationActionPattern - ), - "*" - ), - is(true) - ); - - final String otherApplication = "logstash-" + randomAlphaOfLengthBetween(8, 24); - assertThat( - role.application().grants(ApplicationPrivilegeTests.createPrivilege(otherApplication, "app-foo", "foo"), "*"), - is(false) - ); - assertThat( - role.application() - .grants( - ApplicationPrivilegeTests.createPrivilege( - otherApplication, - "app-reserved_ml_apm_user", - allowedApplicationActionPattern - ), - "*" - ), - is(false) - ); - } - public void testMachineLearningAdminRole() { final TransportRequest request = mock(TransportRequest.class); final Authentication authentication = AuthenticationTestHelper.builder().build(); diff --git a/x-pack/plugin/deprecation/qa/early-deprecation-rest/build.gradle b/x-pack/plugin/deprecation/qa/early-deprecation-rest/build.gradle index 2d8859bdcea3..a9580f4e14d6 100644 --- a/x-pack/plugin/deprecation/qa/early-deprecation-rest/build.gradle +++ b/x-pack/plugin/deprecation/qa/early-deprecation-rest/build.gradle @@ -37,6 +37,6 @@ testClusters.configureEach { // Test clusters run with security disabled tasks.named("javaRestTest") { - BuildParams.withFipsEnabledOnly(it) + buildParams.withFipsEnabledOnly(it) } diff --git a/x-pack/plugin/deprecation/qa/rest/build.gradle b/x-pack/plugin/deprecation/qa/rest/build.gradle index 70c0cadbce37..9a8b228763fe 100644 --- a/x-pack/plugin/deprecation/qa/rest/build.gradle +++ b/x-pack/plugin/deprecation/qa/rest/build.gradle @@ -34,5 +34,5 @@ testClusters.configureEach { // Test clusters run with security disabled tasks.named("javaRestTest") { - BuildParams.withFipsEnabledOnly(it) + buildParams.withFipsEnabledOnly(it) } diff --git a/x-pack/plugin/downsample/qa/mixed-cluster/build.gradle b/x-pack/plugin/downsample/qa/mixed-cluster/build.gradle index 6b1c7e42c0fd..c4f2a239d48e 100644 --- a/x-pack/plugin/downsample/qa/mixed-cluster/build.gradle +++ b/x-pack/plugin/downsample/qa/mixed-cluster/build.gradle @@ -29,7 +29,7 @@ def supportedVersion = bwcVersion -> { return bwcVersion.onOrAfter("8.10.0") && bwcVersion != VersionProperties.elasticsearchVersion } -BuildParams.bwcVersions.withWireCompatible(supportedVersion) { bwcVersion, baseName -> +buildParams.bwcVersions.withWireCompatible(supportedVersion) { bwcVersion, baseName -> def yamlRestTest = tasks.register("v${bwcVersion}#yamlRestTest", StandaloneRestIntegTestTask) { usesDefaultDistribution() diff --git a/x-pack/plugin/downsample/qa/rest/build.gradle b/x-pack/plugin/downsample/qa/rest/build.gradle index ba5ac7b0c731..c5cfbea000eb 100644 --- a/x-pack/plugin/downsample/qa/rest/build.gradle +++ b/x-pack/plugin/downsample/qa/rest/build.gradle @@ -32,7 +32,7 @@ tasks.named('yamlRestTest') { tasks.named('yamlRestCompatTest') { usesDefaultDistribution() } -if (BuildParams.inFipsJvm){ +if (buildParams.inFipsJvm){ // This test cluster is using a BASIC 
license and FIPS 140 mode is not supported in BASIC tasks.named("yamlRestTest").configure{enabled = false } } diff --git a/x-pack/plugin/downsample/qa/with-security/build.gradle b/x-pack/plugin/downsample/qa/with-security/build.gradle index 5eed73595018..849c242f372b 100644 --- a/x-pack/plugin/downsample/qa/with-security/build.gradle +++ b/x-pack/plugin/downsample/qa/with-security/build.gradle @@ -28,7 +28,7 @@ testClusters.configureEach { user username: 'elastic_admin', password: 'admin-password' } -if (BuildParams.inFipsJvm){ +if (buildParams.inFipsJvm){ // This test cluster is using a BASIC license and FIPS 140 mode is not supported in BASIC tasks.named("yamlRestTest").configure{enabled = false } } diff --git a/x-pack/plugin/enrich/qa/rest-with-security/build.gradle b/x-pack/plugin/enrich/qa/rest-with-security/build.gradle index 69fec4ad32c7..844cfcc61adf 100644 --- a/x-pack/plugin/enrich/qa/rest-with-security/build.gradle +++ b/x-pack/plugin/enrich/qa/rest-with-security/build.gradle @@ -6,7 +6,7 @@ dependencies { javaRestTestImplementation project(path: xpackModule('core')) javaRestTestImplementation project(path: xpackModule('enrich:qa:common')) } -if (BuildParams.inFipsJvm){ +if (buildParams.inFipsJvm){ // This test cluster is using a BASIC license and FIPS 140 mode is not supported in BASIC tasks.named("javaRestTest").configure{enabled = false } } diff --git a/x-pack/plugin/enrich/qa/rest/build.gradle b/x-pack/plugin/enrich/qa/rest/build.gradle index f96eff5f933c..637ab21a98fd 100644 --- a/x-pack/plugin/enrich/qa/rest/build.gradle +++ b/x-pack/plugin/enrich/qa/rest/build.gradle @@ -19,7 +19,7 @@ dependencies { javaRestTestImplementation project(path: xpackModule('enrich:qa:common')) } -if (BuildParams.inFipsJvm){ +if (buildParams.inFipsJvm){ // This test cluster is using a BASIC license and FIPS 140 mode is not supported in BASIC tasks.named("javaRestTest").configure{enabled = false } tasks.named("yamlRestTest").configure{enabled = false } diff --git a/x-pack/plugin/ent-search/qa/full-cluster-restart/build.gradle b/x-pack/plugin/ent-search/qa/full-cluster-restart/build.gradle index e84adf0c0325..47a1ffaa37fa 100644 --- a/x-pack/plugin/ent-search/qa/full-cluster-restart/build.gradle +++ b/x-pack/plugin/ent-search/qa/full-cluster-restart/build.gradle @@ -17,7 +17,7 @@ dependencies { javaRestTestImplementation(testArtifact(project(":qa:full-cluster-restart"), "javaRestTest")) } -BuildParams.bwcVersions.withWireCompatible(v -> v.after("8.8.0")) { bwcVersion, baseName -> +buildParams.bwcVersions.withWireCompatible(v -> v.after("8.8.0")) { bwcVersion, baseName -> tasks.register(bwcTaskName(bwcVersion), StandaloneRestIntegTestTask) { usesBwcDistribution(bwcVersion) systemProperty("tests.old_cluster_version", bwcVersion) diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index a354ca4b4b31..df1c76ccf770 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -456,7 +456,6 @@ public Collection createComponents(PluginServices services) { // Behavioral analytics components final AnalyticsTemplateRegistry analyticsTemplateRegistry = new AnalyticsTemplateRegistry( services.clusterService(), - services.featureService(), services.threadPool(), services.client(), 
services.xContentRegistry() @@ -466,7 +465,6 @@ public Collection createComponents(PluginServices services) { // Connector components final ConnectorTemplateRegistry connectorTemplateRegistry = new ConnectorTemplateRegistry( services.clusterService(), - services.featureService(), services.threadPool(), services.client(), services.xContentRegistry() diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchFeatures.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchFeatures.java index 86882a28ec39..ba121f2cf865 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchFeatures.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchFeatures.java @@ -7,15 +7,11 @@ package org.elasticsearch.xpack.application; -import org.elasticsearch.Version; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; -import org.elasticsearch.xpack.application.analytics.AnalyticsTemplateRegistry; -import org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry; import org.elasticsearch.xpack.application.rules.action.ListQueryRulesetsAction; import org.elasticsearch.xpack.application.rules.retriever.QueryRuleRetrieverBuilder; -import java.util.Map; import java.util.Set; import static org.elasticsearch.xpack.application.rules.action.TestQueryRulesetAction.QUERY_RULES_TEST_API; @@ -30,14 +26,4 @@ public Set getFeatures() { ListQueryRulesetsAction.QUERY_RULE_LIST_TYPES ); } - - @Override - public Map getHistoricalFeatures() { - return Map.of( - ConnectorTemplateRegistry.CONNECTOR_TEMPLATES_FEATURE, - Version.V_8_10_0, - AnalyticsTemplateRegistry.ANALYTICS_TEMPLATE_FEATURE, - Version.V_8_12_0 - ); - } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsTemplateRegistry.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsTemplateRegistry.java index d9f433b8052b..99a239dd617a 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsTemplateRegistry.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsTemplateRegistry.java @@ -7,13 +7,10 @@ package org.elasticsearch.xpack.application.analytics; import org.elasticsearch.client.internal.Client; -import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.metadata.ComponentTemplate; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.features.FeatureService; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentParserConfiguration; @@ -36,8 +33,6 @@ public class AnalyticsTemplateRegistry extends IndexTemplateRegistry { - public static final NodeFeature ANALYTICS_TEMPLATE_FEATURE = new NodeFeature("behavioral_analytics.templates"); - // This number must be incremented when we make changes to built-in templates. 
static final int REGISTRY_VERSION = 3; @@ -100,17 +95,13 @@ protected List getIngestPipelines() { ) ); - private final FeatureService featureService; - public AnalyticsTemplateRegistry( ClusterService clusterService, - FeatureService featureService, ThreadPool threadPool, Client client, NamedXContentRegistry xContentRegistry ) { super(Settings.EMPTY, clusterService, threadPool, client, xContentRegistry); - this.featureService = featureService; } @Override @@ -138,9 +129,4 @@ protected boolean requiresMasterNode() { // there and the ActionNotFoundTransportException errors are then prevented. return true; } - - @Override - protected boolean isClusterReady(ClusterChangedEvent event) { - return featureService.clusterHasFeature(event.state(), ANALYTICS_TEMPLATE_FEATURE); - } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistry.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistry.java index 41976bc6b427..9b8cc7cfdbe4 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistry.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistry.java @@ -8,13 +8,10 @@ package org.elasticsearch.xpack.application.connector; import org.elasticsearch.client.internal.Client; -import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.metadata.ComponentTemplate; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.features.FeatureService; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentParserConfiguration; @@ -33,8 +30,6 @@ public class ConnectorTemplateRegistry extends IndexTemplateRegistry { - public static final NodeFeature CONNECTOR_TEMPLATES_FEATURE = new NodeFeature("elastic-connectors.templates"); - // This number must be incremented when we make changes to built-in templates. 
static final int REGISTRY_VERSION = 3; @@ -153,17 +148,13 @@ protected List getIngestPipelines() { ) ); - private final FeatureService featureService; - public ConnectorTemplateRegistry( ClusterService clusterService, - FeatureService featureService, ThreadPool threadPool, Client client, NamedXContentRegistry xContentRegistry ) { super(Settings.EMPTY, clusterService, threadPool, client, xContentRegistry); - this.featureService = featureService; } @Override @@ -186,9 +177,4 @@ protected boolean requiresMasterNode() { // Necessary to prevent conflicts in some mixed-cluster environments with pre-7.7 nodes return true; } - - @Override - protected boolean isClusterReady(ClusterChangedEvent event) { - return featureService.clusterHasFeature(event.state(), CONNECTOR_TEMPLATES_FEATURE); - } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesetListItem.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesetListItem.java index a5e2d3f79da0..3a61c848d381 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesetListItem.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesetListItem.java @@ -67,7 +67,10 @@ public QueryRulesetListItem(StreamInput in) throws IOException { } else { this.criteriaTypeToCountMap = Map.of(); } - if (in.getTransportVersion().onOrAfter(TransportVersions.QUERY_RULES_LIST_INCLUDES_TYPES)) { + TransportVersion streamTransportVersion = in.getTransportVersion(); + if (streamTransportVersion.isPatchFrom(TransportVersions.QUERY_RULES_LIST_INCLUDES_TYPES_BACKPORT_8_15) + || streamTransportVersion.isPatchFrom(TransportVersions.QUERY_RULES_LIST_INCLUDES_TYPES_BACKPORT_8_16) + || streamTransportVersion.onOrAfter(TransportVersions.QUERY_RULES_LIST_INCLUDES_TYPES)) { this.ruleTypeToCountMap = in.readMap(m -> in.readEnum(QueryRule.QueryRuleType.class), StreamInput::readInt); } else { this.ruleTypeToCountMap = Map.of(); @@ -100,7 +103,10 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(EXPANDED_RULESET_COUNT_TRANSPORT_VERSION)) { out.writeMap(criteriaTypeToCountMap, StreamOutput::writeEnum, StreamOutput::writeInt); } - if (out.getTransportVersion().onOrAfter(TransportVersions.QUERY_RULES_LIST_INCLUDES_TYPES)) { + TransportVersion streamTransportVersion = out.getTransportVersion(); + if (streamTransportVersion.isPatchFrom(TransportVersions.QUERY_RULES_LIST_INCLUDES_TYPES_BACKPORT_8_15) + || streamTransportVersion.isPatchFrom(TransportVersions.QUERY_RULES_LIST_INCLUDES_TYPES_BACKPORT_8_16) + || streamTransportVersion.onOrAfter(TransportVersions.QUERY_RULES_LIST_INCLUDES_TYPES)) { out.writeMap(ruleTypeToCountMap, StreamOutput::writeEnum, StreamOutput::writeInt); } } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/AnalyticsTemplateRegistryTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/AnalyticsTemplateRegistryTests.java index 50102b8cfcf5..fb2fb11c7460 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/AnalyticsTemplateRegistryTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/AnalyticsTemplateRegistryTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.application.analytics; -import org.elasticsearch.Version; import 
org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; @@ -31,7 +30,6 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Strings; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.ingest.IngestMetadata; import org.elasticsearch.ingest.PipelineConfiguration; import org.elasticsearch.test.ClusterServiceUtils; @@ -42,7 +40,6 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.application.EnterpriseSearchFeatures; import org.elasticsearch.xpack.core.ilm.IndexLifecycleMetadata; import org.elasticsearch.xpack.core.ilm.LifecyclePolicy; import org.elasticsearch.xpack.core.ilm.LifecyclePolicyMetadata; @@ -78,13 +75,7 @@ public void createRegistryAndClient() { threadPool = new TestThreadPool(this.getClass().getName()); client = new VerifyingClient(threadPool); ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool); - registry = new AnalyticsTemplateRegistry( - clusterService, - new FeatureService(List.of(new EnterpriseSearchFeatures())), - threadPool, - client, - NamedXContentRegistry.EMPTY - ); + registry = new AnalyticsTemplateRegistry(clusterService, threadPool, client, NamedXContentRegistry.EMPTY); } @After @@ -282,25 +273,6 @@ public void testThatNonExistingPipelinesAreAddedImmediately() throws Exception { assertBusy(() -> assertThat(calledTimes.get(), equalTo(registry.getIngestPipelines().size()))); } - public void testThatNothingIsInstalledWhenAllNodesAreNotUpdated() { - DiscoveryNode updatedNode = DiscoveryNodeUtils.create("updatedNode"); - DiscoveryNode outdatedNode = DiscoveryNodeUtils.create("outdatedNode", ESTestCase.buildNewFakeTransportAddress(), Version.V_8_7_0); - DiscoveryNodes nodes = DiscoveryNodes.builder() - .localNodeId("updatedNode") - .masterNodeId("updatedNode") - .add(updatedNode) - .add(outdatedNode) - .build(); - - client.setVerifier((a, r, l) -> { - fail("if some cluster mode are not updated to at least v.8.8.0 nothing should happen"); - return null; - }); - - ClusterChangedEvent event = createClusterChangedEvent(Collections.emptyMap(), Collections.emptyMap(), nodes); - registry.clusterChanged(event); - } - // ------------- /** diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistryTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistryTests.java index 3fbc5cd749cb..a4c7015afafc 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistryTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistryTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.application.connector; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; @@ -31,7 +30,6 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Strings; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.ingest.IngestMetadata; import org.elasticsearch.ingest.PipelineConfiguration; 
import org.elasticsearch.test.ClusterServiceUtils; @@ -41,7 +39,6 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.application.EnterpriseSearchFeatures; import org.elasticsearch.xpack.core.ilm.IndexLifecycleMetadata; import org.elasticsearch.xpack.core.ilm.LifecyclePolicy; import org.elasticsearch.xpack.core.ilm.LifecyclePolicyMetadata; @@ -81,8 +78,7 @@ public void createRegistryAndClient() { threadPool = new TestThreadPool(this.getClass().getName()); client = new VerifyingClient(threadPool); ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool); - FeatureService featureService = new FeatureService(List.of(new EnterpriseSearchFeatures())); - registry = new ConnectorTemplateRegistry(clusterService, featureService, threadPool, client, NamedXContentRegistry.EMPTY); + registry = new ConnectorTemplateRegistry(clusterService, threadPool, client, NamedXContentRegistry.EMPTY); } @After @@ -310,25 +306,6 @@ public void testThatNonExistingPipelinesAreAddedImmediately() throws Exception { assertBusy(() -> assertThat(calledTimes.get(), equalTo(registry.getIngestPipelines().size()))); } - public void testThatNothingIsInstalledWhenAllNodesAreNotUpdated() { - DiscoveryNode updatedNode = DiscoveryNodeUtils.create("updatedNode"); - DiscoveryNode outdatedNode = DiscoveryNodeUtils.create("outdatedNode", ESTestCase.buildNewFakeTransportAddress(), Version.V_8_9_0); - DiscoveryNodes nodes = DiscoveryNodes.builder() - .localNodeId("updatedNode") - .masterNodeId("updatedNode") - .add(updatedNode) - .add(outdatedNode) - .build(); - - client.setVerifier((a, r, l) -> { - fail("if some cluster mode are not updated to at least v.8.10.0 nothing should happen"); - return null; - }); - - ClusterChangedEvent event = createClusterChangedEvent(Collections.emptyMap(), Collections.emptyMap(), nodes); - registry.clusterChanged(event); - } - // ------------- /** diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/ListQueryRulesetsActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/ListQueryRulesetsActionResponseBWCSerializingTests.java index 27ac214558f8..27d5e240534b 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/ListQueryRulesetsActionResponseBWCSerializingTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/ListQueryRulesetsActionResponseBWCSerializingTests.java @@ -59,7 +59,9 @@ protected ListQueryRulesetsAction.Response mutateInstanceForVersion( ListQueryRulesetsAction.Response instance, TransportVersion version ) { - if (version.onOrAfter(TransportVersions.QUERY_RULES_LIST_INCLUDES_TYPES)) { + if (version.isPatchFrom(TransportVersions.QUERY_RULES_LIST_INCLUDES_TYPES_BACKPORT_8_15) + || version.isPatchFrom(TransportVersions.QUERY_RULES_LIST_INCLUDES_TYPES_BACKPORT_8_16) + || version.onOrAfter(TransportVersions.QUERY_RULES_LIST_INCLUDES_TYPES)) { return instance; } else if (version.onOrAfter(QueryRulesetListItem.EXPANDED_RULESET_COUNT_TRANSPORT_VERSION)) { List updatedResults = new ArrayList<>(); diff --git a/x-pack/plugin/eql/build.gradle b/x-pack/plugin/eql/build.gradle index cda236c3d02a..b0b5fefa37fc 100644 --- a/x-pack/plugin/eql/build.gradle +++ b/x-pack/plugin/eql/build.gradle @@ -32,7 +32,7 @@ dependencies { * Enable 
QA/rest integration tests for snapshot builds only * * TODO: Enable for all builds upon this feature release * ****************************************************************/ -if (BuildParams.isSnapshotBuild()) { +if (buildParams.isSnapshotBuild()) { addQaCheckDependencies(project) } diff --git a/x-pack/plugin/eql/qa/ccs-rolling-upgrade/build.gradle b/x-pack/plugin/eql/qa/ccs-rolling-upgrade/build.gradle index a16c24c85237..cbea0896264d 100644 --- a/x-pack/plugin/eql/qa/ccs-rolling-upgrade/build.gradle +++ b/x-pack/plugin/eql/qa/ccs-rolling-upgrade/build.gradle @@ -15,7 +15,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.bwc-test' apply plugin: 'elasticsearch.rest-resources' -BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> +buildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> /** * We execute tests 3 times. diff --git a/x-pack/plugin/eql/qa/correctness/build.gradle b/x-pack/plugin/eql/qa/correctness/build.gradle index d245dc444f0b..a791356499f5 100644 --- a/x-pack/plugin/eql/qa/correctness/build.gradle +++ b/x-pack/plugin/eql/qa/correctness/build.gradle @@ -41,7 +41,7 @@ def runTaskCluster = testClusters.register('runTask') { tasks.named('javaRestTest').configure { onlyIf("FIPS mode disabled and service accoutn file available") { - serviceAccountFile && BuildParams.inFipsJvm == false + serviceAccountFile && buildParams.inFipsJvm == false } testLogging { diff --git a/x-pack/plugin/eql/qa/mixed-node/build.gradle b/x-pack/plugin/eql/qa/mixed-node/build.gradle index 8b9e082215fc..d3aa227c7ef8 100644 --- a/x-pack/plugin/eql/qa/mixed-node/build.gradle +++ b/x-pack/plugin/eql/qa/mixed-node/build.gradle @@ -13,7 +13,7 @@ dependencies { tasks.named("javaRestTest").configure { enabled = false } -BuildParams.bwcVersions.withWireCompatible(v -> v.onOrAfter("7.10.0") && +buildParams.bwcVersions.withWireCompatible(v -> v.onOrAfter("7.10.0") && v != VersionProperties.getElasticsearchVersion()) { bwcVersion, baseName -> def cluster = testClusters.register(baseName) { versions = [bwcVersion.toString(), project.version] diff --git a/x-pack/plugin/eql/qa/rest/build.gradle b/x-pack/plugin/eql/qa/rest/build.gradle index d5b0cc42091f..00f196d863f2 100644 --- a/x-pack/plugin/eql/qa/rest/build.gradle +++ b/x-pack/plugin/eql/qa/rest/build.gradle @@ -30,7 +30,7 @@ tasks.named('yamlRestCompatTest') { usesDefaultDistribution() } -if (BuildParams.inFipsJvm){ +if (buildParams.inFipsJvm){ // This test cluster is using a BASIC license and FIPS 140 mode is not supported in BASIC tasks.named("javaRestTest").configure{enabled = false } tasks.named("yamlRestTest").configure{enabled = false } diff --git a/x-pack/plugin/eql/qa/security/build.gradle b/x-pack/plugin/eql/qa/security/build.gradle index 0641c47273f0..1f0f949cab70 100644 --- a/x-pack/plugin/eql/qa/security/build.gradle +++ b/x-pack/plugin/eql/qa/security/build.gradle @@ -10,7 +10,7 @@ tasks.named('javaRestTest') { usesDefaultDistribution() } -if (BuildParams.inFipsJvm){ +if (buildParams.inFipsJvm){ // This test cluster is using a BASIC license and FIPS 140 mode is not supported in BASIC tasks.named("javaRestTest").configure{enabled = false } } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/Function.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/Function.java index cad5c631088f..a1afcdbf1f77 100644 --- 
a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/Function.java
+++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/Function.java
@@ -6,6 +6,7 @@
  */
 package org.elasticsearch.xpack.esql.core.expression.function;

+import org.elasticsearch.license.XPackLicenseState;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.expression.Expressions;
 import org.elasticsearch.xpack.esql.core.expression.Nullability;
@@ -42,6 +43,11 @@ public Nullability nullable() {
         return Expressions.nullable(children());
     }

+    /** Return true if this function can be executed under the provided {@link XPackLicenseState}, otherwise false. */
+    public boolean checkLicense(XPackLicenseState state) {
+        return true;
+    }
+
     @Override
     public int hashCode() {
         return Objects.hash(getClass(), children());
diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/Predicates.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/Predicates.java
index 28bbf956fd71..e63cc1fcf25f 100644
--- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/Predicates.java
+++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/Predicates.java
@@ -6,7 +6,9 @@
  */
 package org.elasticsearch.xpack.esql.core.expression.predicate;

+import org.elasticsearch.core.Tuple;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.expression.Literal;
 import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And;
 import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or;

@@ -113,4 +115,43 @@ public static List<Expression> subtract(List<Expression> from, List<Expression> r) {
         }
         return diff.isEmpty() ? emptyList() : diff;
     }
+
+    /**
+     * Given a list of expressions of predicates, extract a new expression of
+     * all the common ones and return it, along with the original list with the
+     * common ones removed.
+     *
+     * Example: for ['field1 > 0 AND field2 > 0', 'field1 > 0 AND field3 > 0',
+     * 'field1 > 0'], the function will return 'field1 > 0' as the common
+     * predicate expression and ['field2 > 0', 'field3 > 0', Literal.TRUE] as
+     * the list of remaining predicates.
+     *
+     * @param expressions list of expressions to extract common predicates from.
+     * @return a tuple having as the first element an expression of the common
+     * predicates and as the second element the list of expressions with the
+     * common predicates removed. If there are no common predicates, `null` will
+     * be returned as the first element and the original list as the second. If,
+     * for one of the expressions in the input list, nothing is left after
+     * trimming the common predicates, it will be replaced with Literal.TRUE.
+     */
+    public static Tuple<Expression, List<Expression>> extractCommon(List<Expression> expressions) {
+        List<Expression> common = null;
+        List<List<Expression>> splitAnds = new ArrayList<>(expressions.size());
+        for (var expression : expressions) {
+            var split = splitAnd(expression);
+            common = common == null ? split : inCommon(split, common);
+            if (common.isEmpty()) {
+                return Tuple.tuple(null, expressions);
+            }
+            splitAnds.add(split);
+        }
+
+        List<Expression> trimmed = new ArrayList<>(expressions.size());
+        final List<Expression> finalCommon = common;
+        splitAnds.forEach(split -> {
+            var subtracted = subtract(split, finalCommon);
+            trimmed.add(subtracted.isEmpty() ? Literal.TRUE : combineAnd(subtracted));
+        });
+        return Tuple.tuple(combineAnd(common), trimmed);
+    }
 }
diff --git a/x-pack/plugin/esql/build.gradle b/x-pack/plugin/esql/build.gradle
index f07517b344ce..716c679b2fc1 100644
--- a/x-pack/plugin/esql/build.gradle
+++ b/x-pack/plugin/esql/build.gradle
@@ -76,7 +76,7 @@ interface Injected {
 }

 tasks.named("test").configure {
-  if (BuildParams.isCi() == false) {
+  if (buildParams.isCi() == false) {
     systemProperty 'generateDocs', true
     def injected = project.objects.newInstance(Injected)
     doFirst {
@@ -147,7 +147,7 @@ tasks.named("test").configure {
 * Enable QA/rest integration tests for snapshot builds only *
 * TODO: Enable for all builds upon this feature release     *
 ****************************************************************/
-if (BuildParams.isSnapshotBuild()) {
+if (buildParams.isSnapshotBuild()) {
   addQaCheckDependencies(project)
 }
diff --git a/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle b/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle
index fb47255e8d52..68c0e8e30f81 100644
--- a/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle
+++ b/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle
@@ -31,7 +31,7 @@ def supportedVersion = bwcVersion -> {
   return bwcVersion.onOrAfter(Version.fromString("8.11.0")) && bwcVersion != VersionProperties.elasticsearchVersion
 }

-BuildParams.bwcVersions.withWireCompatible(supportedVersion) { bwcVersion, baseName ->
+buildParams.bwcVersions.withWireCompatible(supportedVersion) { bwcVersion, baseName ->
   def javaRestTest = tasks.register("v${bwcVersion}#javaRestTest", StandaloneRestIntegTestTask) {
     usesBwcDistribution(bwcVersion)
     systemProperty("tests.old_cluster_version", bwcVersion)
diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/build.gradle b/x-pack/plugin/esql/qa/server/multi-clusters/build.gradle
index 77497597a18c..2c432eb94ebf 100644
--- a/x-pack/plugin/esql/qa/server/multi-clusters/build.gradle
+++ b/x-pack/plugin/esql/qa/server/multi-clusters/build.gradle
@@ -23,7 +23,7 @@ def supportedVersion = bwcVersion -> {
   return bwcVersion.onOrAfter(Version.fromString("8.13.0"));
 }
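The Predicates.extractCommon helper added above is the engine behind the new ExtractAggregateCommonFilter optimizer rule and the commonFilterExtraction* csv-spec tests: it intersects the AND-ed predicates of every aggregate filter and subtracts the shared part. A minimal, self-contained sketch of the same intersect-and-subtract idea, with plain strings standing in for Expression trees (the class and the string encoding are illustrative only, not the ES API):

import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;

// Sketch of the common-predicate extraction over strings instead of Expressions.
public class CommonPredicateSketch {
    public static void main(String[] args) {
        // Each inner list is one filter already split on AND (cf. splitAnd).
        List<List<String>> conjunctions = List.of(
            List.of("field1 > 0", "field2 > 0"),
            List.of("field1 > 0", "field3 > 0"),
            List.of("field1 > 0")
        );

        // Intersect all conjunctions to find the shared predicates (cf. inCommon).
        LinkedHashSet<String> common = new LinkedHashSet<>(conjunctions.get(0));
        for (List<String> split : conjunctions) {
            common.retainAll(split);
        }

        // Subtract the shared predicates; an emptied conjunction degenerates
        // to TRUE (cf. Literal.TRUE in the real implementation).
        List<String> trimmed = new ArrayList<>();
        for (List<String> split : conjunctions) {
            List<String> rest = new ArrayList<>(split);
            rest.removeAll(common);
            trimmed.add(rest.isEmpty() ? "TRUE" : String.join(" AND ", rest));
        }

        System.out.println(common);  // [field1 > 0]
        System.out.println(trimmed); // [field2 > 0, field3 > 0, TRUE]
    }
}

Run as-is this prints the same split as the javadoc example above: the common filter can be hoisted out once while each aggregate keeps only its residual condition.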
-BuildParams.bwcVersions.withWireCompatible(supportedVersion) { bwcVersion, baseName -> +buildParams.bwcVersions.withWireCompatible(supportedVersion) { bwcVersion, baseName -> tasks.register(bwcTaskName(bwcVersion), StandaloneRestIntegTestTask) { usesBwcDistribution(bwcVersion) systemProperty("tests.old_cluster_version", bwcVersion) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java index 2913401d8aab..d6715a932c07 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java @@ -29,6 +29,7 @@ import org.elasticsearch.geo.GeometryTestUtils; import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.index.IndexMode; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.esql.action.EsqlQueryResponse; @@ -342,7 +343,7 @@ public String toString() { public static final Configuration TEST_CFG = configuration(new QueryPragmas(Settings.EMPTY)); - public static final Verifier TEST_VERIFIER = new Verifier(new Metrics(new EsqlFunctionRegistry())); + public static final Verifier TEST_VERIFIER = new Verifier(new Metrics(new EsqlFunctionRegistry()), new XPackLicenseState(() -> 0L)); private EsqlTestUtils() {} diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index 96aa779ad38c..7a046786a4f1 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -2468,6 +2468,7 @@ count:long |values:keyword |job_positions:keyword ; prunedStatsFollowedByStats +required_capability: per_agg_filtering from employees | eval my_length = length(concat(first_name, null)) | stats count = count(my_length) where false, @@ -2641,6 +2642,57 @@ c2:l |c2_f:l |m2:i |m2_f:i |c:l 1 |1 |5 |5 |21 ; +commonFilterExtractionWithAliasing +required_capability: per_agg_filtering +from employees +| eval eno = emp_no +| drop emp_no +| stats min_sal = min(salary) where eno <= 10010, + min_hei = min(height) where eno <= 10010 +; + +min_sal:integer |min_hei:double +36174 |1.56 +; + +commonFilterExtractionWithAliasAndOriginal +required_capability: per_agg_filtering +from employees +| eval eno = emp_no +| stats min_sal = min(salary) where eno <= 10010, + min_hei = min(height) where emp_no <= 10010 +; + +// same results as above in commonFilterExtractionWithAliasing +min_sal:integer |min_hei:double +36174 |1.56 +; + +commonFilterExtractionWithAliasAndOriginalNeedingNormalization +required_capability: per_agg_filtering +from employees +| eval eno = emp_no +| stats min_sal = min(salary) where eno <= 10010, + min_hei = min(height) where emp_no <= 10010, + max_hei = max(height) where 10010 >= emp_no +; + +min_sal:integer |min_hei:double |max_hei:double +36174 |1.56 |2.1 +; + +commonFilterExtractionWithAliasAndOriginalNeedingNormalizationAndSimplification +required_capability: per_agg_filtering +from employees +| eval eno = emp_no +| stats min_sal = min(salary) where eno <= 10010, + min_hei = min(height) where not (emp_no > 10010), + max_hei = max(height) where 10010 >= emp_no +; + +min_sal:integer |min_hei:double |max_hei:double
+36174 |1.56 |2.1 +; statsByConstantExpressionNoAggs required_capability: fix_stats_by_foldable_expression diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java index 49ba18023399..d6312006f4c3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.analysis; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.elasticsearch.xpack.esql.common.Failure; import org.elasticsearch.xpack.esql.core.capabilities.Unresolvable; @@ -83,9 +84,11 @@ public class Verifier { private final Metrics metrics; + private final XPackLicenseState licenseState; - public Verifier(Metrics metrics) { + public Verifier(Metrics metrics, XPackLicenseState licenseState) { this.metrics = metrics; + this.licenseState = licenseState; } /** @@ -202,6 +205,10 @@ else if (p instanceof Lookup lookup) { }); checkRemoteEnrich(plan, failures); + if (failures.isEmpty()) { + checkLicense(plan, licenseState, failures); + } + // gather metrics if (failures.isEmpty()) { gatherMetrics(plan, partialMetrics); @@ -547,6 +554,14 @@ private static void checkBinaryComparison(LogicalPlan p, Set<Failure> failures) }); } + private void checkLicense(LogicalPlan plan, XPackLicenseState licenseState, Set<Failure> failures) { + plan.forEachExpressionDown(Function.class, p -> { + if (p.checkLicense(licenseState) == false) { + failures.add(new Failure(p, "current license is non-compliant for function [" + p.sourceText() + "]")); + } + }); + } + private void gatherMetrics(LogicalPlan plan, BitSet b) { plan.forEachDown(p -> FeatureMetric.set(p, b)); for (int i = b.nextSetBit(0); i >= 0; i = b.nextSetBit(i + 1)) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java index 816388193c5f..c1269009c6a4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.indices.IndicesExpressionGrouper; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.xpack.esql.action.EsqlExecutionInfo; import org.elasticsearch.xpack.esql.action.EsqlQueryRequest; @@ -40,13 +41,13 @@ public class PlanExecutor { private final Verifier verifier; private final PlanningMetricsManager planningMetricsManager; - public PlanExecutor(IndexResolver indexResolver, MeterRegistry meterRegistry) { + public PlanExecutor(IndexResolver indexResolver, MeterRegistry meterRegistry, XPackLicenseState licenseState) { this.indexResolver = indexResolver; this.preAnalyzer = new PreAnalyzer(); this.functionRegistry = new EsqlFunctionRegistry(); this.mapper = new Mapper(); this.metrics = new Metrics(functionRegistry); - this.verifier = new Verifier(metrics); + this.verifier = new Verifier(metrics, licenseState); this.planningMetricsManager = new PlanningMetricsManager(meterRegistry); } diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java index a0e257d1a895..5007b011092f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.esql.optimizer.rules.logical.CombineProjections; import org.elasticsearch.xpack.esql.optimizer.rules.logical.ConstantFolding; import org.elasticsearch.xpack.esql.optimizer.rules.logical.ConvertStringToByteRef; +import org.elasticsearch.xpack.esql.optimizer.rules.logical.ExtractAggregateCommonFilter; import org.elasticsearch.xpack.esql.optimizer.rules.logical.FoldNull; import org.elasticsearch.xpack.esql.optimizer.rules.logical.LiteralsOnTheRight; import org.elasticsearch.xpack.esql.optimizer.rules.logical.PartiallyFoldCase; @@ -124,8 +125,9 @@ protected static Batch<LogicalPlan> substitutions() { "Substitutions", Limiter.ONCE, new SubstituteSurrogatePlans(), - // translate filtered expressions into aggregate with filters - can't use surrogate expressions because it was - // retrofitted for constant folding - this needs to be fixed + // Translate filtered expressions into aggregate with filters - can't use surrogate expressions because it was + // retrofitted for constant folding - this needs to be fixed. + // Needs to occur before ReplaceAggregateAggExpressionWithEval, which will update the functions, losing the filter. new SubstituteFilteredExpression(), new RemoveStatsOverride(), // first extract nested expressions inside aggs @@ -170,8 +172,10 @@ protected static Batch<LogicalPlan> operators() { new BooleanFunctionEqualsElimination(), new CombineBinaryComparisons(), new CombineDisjunctions(), + // TODO: bifunction can now (since we now have just one data types set) be pushed into the rule new SimplifyComparisonsArithmetics(DataType::areCompatible), new ReplaceStatsFilteredAggWithEval(), + new ExtractAggregateCommonFilter(), // prune/elimination new PruneFilters(), new PruneColumns(), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ExtractAggregateCommonFilter.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ExtractAggregateCommonFilter.java new file mode 100644 index 000000000000..f00a8103f913 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ExtractAggregateCommonFilter.java @@ -0,0 +1,78 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +package org.elasticsearch.xpack.esql.optimizer.rules.logical; + +import org.elasticsearch.xpack.esql.core.expression.Alias; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.NamedExpression; +import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; +import org.elasticsearch.xpack.esql.plan.logical.Aggregate; +import org.elasticsearch.xpack.esql.plan.logical.Filter; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; + +import java.util.ArrayList; +import java.util.List; + +import static org.elasticsearch.xpack.esql.core.expression.predicate.Predicates.extractCommon; + +/** + * Extract a per-function expression filter applied to all the aggs as a query {@link Filter}, when no groups are provided. + * <p>
+ * Example:
+ * <pre>
+ *         ... | STATS MIN(a) WHERE b > 0, MIN(c) WHERE b > 0 | ...
+ *         =>
+ *         ... | WHERE b > 0 | STATS MIN(a), MIN(c) | ...
+ *     </pre>
+ */ +public final class ExtractAggregateCommonFilter extends OptimizerRules.OptimizerRule<Aggregate> { + public ExtractAggregateCommonFilter() { + super(OptimizerRules.TransformDirection.UP); + } + + @Override + protected LogicalPlan rule(Aggregate aggregate) { + if (aggregate.groupings().isEmpty() == false) { + return aggregate; // no optimization for grouped stats + } + + // collect all filters from the agg functions + List<Expression> filters = new ArrayList<>(aggregate.aggregates().size()); + for (NamedExpression ne : aggregate.aggregates()) { + if (ne instanceof Alias alias && alias.child() instanceof AggregateFunction aggFunction && aggFunction.hasFilter()) { + filters.add(aggFunction.filter()); + } else { + return aggregate; // (at least one) agg function has no filter -- skip optimization + } + } + + // extract common filters + var common = extractCommon(filters); + if (common.v1() == null) { // no common filter + return aggregate; + } + + // replace agg functions' filters with trimmed ones + var newFilters = common.v2(); + List<NamedExpression> newAggs = new ArrayList<>(aggregate.aggregates().size()); + for (int i = 0; i < aggregate.aggregates().size(); i++) { + var alias = (Alias) aggregate.aggregates().get(i); + var newChild = ((AggregateFunction) alias.child()).withFilter(newFilters.get(i)); + newAggs.add(alias.replaceChild(newChild)); + } + + // build the new agg on top of extracted filter + return new Aggregate( + aggregate.source(), + new Filter(aggregate.source(), aggregate.child(), common.v1()), + aggregate.aggregateType(), + aggregate.groupings(), + newAggs + ); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java index e9b9f571e880..b091ab0c1baf 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java @@ -38,6 +38,7 @@ import org.elasticsearch.compute.operator.exchange.ExchangeSourceOperator; import org.elasticsearch.compute.operator.topn.TopNOperatorStatus; import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestController; @@ -45,6 +46,7 @@ import org.elasticsearch.threadpool.ExecutorBuilder; import org.elasticsearch.threadpool.FixedExecutorBuilder; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction; import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; import org.elasticsearch.xpack.esql.EsqlInfoTransportAction; @@ -116,7 +118,7 @@ public Collection<?> createComponents(PluginServices services) { BlockFactory blockFactory = new BlockFactory(circuitBreaker, bigArrays, maxPrimitiveArrayBlockSize); setupSharedSecrets(); return List.of( - new PlanExecutor(new IndexResolver(services.client()), services.telemetryProvider().getMeterRegistry()), + new PlanExecutor(new IndexResolver(services.client()), services.telemetryProvider().getMeterRegistry(), getLicenseState()), new ExchangeService(services.clusterService().getSettings(), services.threadPool(), ThreadPool.Names.SEARCH, blockFactory), blockFactory ); @@ -131,6 +133,11 @@ private void setupSharedSecrets() { } } + + // to be overridden by tests + protected XPackLicenseState getLicenseState() { + return 
XPackPlugin.getSharedLicenseState(); + } + /** * The settings defined by the ESQL plugin. * diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/CheckLicenseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/CheckLicenseTests.java new file mode 100644 index 000000000000..98f36d339976 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/CheckLicenseTests.java @@ -0,0 +1,138 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function; + +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.license.License; +import org.elasticsearch.license.LicensedFeature; +import org.elasticsearch.license.TestUtils; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.license.internal.XPackLicenseStatus; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.esql.EsqlTestUtils; +import org.elasticsearch.xpack.esql.VerificationException; +import org.elasticsearch.xpack.esql.analysis.Analyzer; +import org.elasticsearch.xpack.esql.analysis.AnalyzerContext; +import org.elasticsearch.xpack.esql.analysis.Verifier; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.function.Function; +import org.elasticsearch.xpack.esql.core.tree.NodeInfo; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.parser.EsqlParser; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.stats.Metrics; + +import java.util.List; + +import static org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils.analyzerDefaultMapping; +import static org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils.defaultEnrichResolution; +import static org.hamcrest.Matchers.containsString; + +public class CheckLicenseTests extends ESTestCase { + + private final EsqlParser parser = new EsqlParser(); + private final String esql = "from tests | eval license() | LIMIT 10"; + + public void testLicense() { + for (License.OperationMode functionLicense : License.OperationMode.values()) { + final LicensedFeature functionLicenseFeature = random().nextBoolean() + ? 
LicensedFeature.momentary("test", "license", functionLicense) + : LicensedFeature.persistent("test", "license", functionLicense); + final EsqlFunctionRegistry.FunctionBuilder builder = (source, expression, cfg) -> { + final LicensedFunction licensedFunction = new LicensedFunction(source); + licensedFunction.setLicensedFeature(functionLicenseFeature); + return licensedFunction; + }; + for (License.OperationMode operationMode : License.OperationMode.values()) { + if (License.OperationMode.TRIAL != operationMode && License.OperationMode.compare(operationMode, functionLicense) < 0) { + // non-compliant license + final VerificationException ex = expectThrows(VerificationException.class, () -> analyze(builder, operationMode)); + assertThat(ex.getMessage(), containsString("current license is non-compliant for function [license()]")); + } else { + // compliant license + assertNotNull(analyze(builder, operationMode)); + } + } + } + } + + private LogicalPlan analyze(EsqlFunctionRegistry.FunctionBuilder builder, License.OperationMode operationMode) { + final FunctionDefinition def = EsqlFunctionRegistry.def(LicensedFunction.class, builder, "license"); + final EsqlFunctionRegistry registry = new EsqlFunctionRegistry(def) { + @Override + public EsqlFunctionRegistry snapshotRegistry() { + return this; + } + }; + return analyzer(registry, operationMode).analyze(parser.createStatement(esql)); + } + + private static Analyzer analyzer(EsqlFunctionRegistry registry, License.OperationMode operationMode) { + return new Analyzer( + new AnalyzerContext(EsqlTestUtils.TEST_CFG, registry, analyzerDefaultMapping(), defaultEnrichResolution()), + new Verifier(new Metrics(new EsqlFunctionRegistry()), getLicenseState(operationMode)) + ); + } + + private static XPackLicenseState getLicenseState(License.OperationMode operationMode) { + final TestUtils.UpdatableLicenseState licenseState = new TestUtils.UpdatableLicenseState(); + licenseState.update(new XPackLicenseStatus(operationMode, true, null)); + return licenseState; + } + + // It needs to be public because we run validation on it via reflection in org.elasticsearch.xpack.esql.tree.EsqlNodeSubclassTests. + // That test also prevents adding the license as a constructor parameter.
+ public static class LicensedFunction extends Function { + + private LicensedFeature licensedFeature; + + public LicensedFunction(Source source) { + super(source, List.of()); + } + + void setLicensedFeature(LicensedFeature licensedFeature) { + this.licensedFeature = licensedFeature; + } + + @Override + public boolean checkLicense(XPackLicenseState state) { + if (licensedFeature instanceof LicensedFeature.Momentary momentary) { + return momentary.check(state); + } else { + return licensedFeature.checkWithoutTracking(state); + } + } + + @Override + public DataType dataType() { + return DataType.KEYWORD; + } + + @Override + public Expression replaceChildren(List<Expression> newChildren) { + throw new UnsupportedOperationException("this type of node doesn't have any children to replace"); + } + + @Override + protected NodeInfo<? extends Expression> info() { + return NodeInfo.create(this); + } + + @Override + public String getWriteableName() { + throw new UnsupportedOperationException(); + } + + @Override + public void writeTo(StreamOutput out) { + throw new UnsupportedOperationException(); + } + } + +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java index 073a51ee6911..2134e16b0013 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.RangeQueryBuilder; import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.EsqlTestUtils.TestSearchStats; @@ -145,7 +146,7 @@ private Analyzer makeAnalyzer(String mappingFileName, EnrichResolution enrichRes return new Analyzer( new AnalyzerContext(config, new EsqlFunctionRegistry(), getIndexResult, enrichResolution), - new Verifier(new Metrics(new EsqlFunctionRegistry())) + new Verifier(new Metrics(new EsqlFunctionRegistry()), new XPackLicenseState(() -> 0L)) ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index d9a0f9ad57fb..c29f111488f9 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -840,6 +840,265 @@ public void testReplaceStatsFilteredAggWithEvalSingleAggWithGroup() { var source = as(aggregate.child(), EsRelation.class); } + public void testExtractStatsCommonFilter() { + var plan = plan(""" + from test + | stats m = min(salary) where emp_no > 1, + max(salary) where emp_no > 1 + """); + + var limit = as(plan, Limit.class); + var agg = as(limit.child(), Aggregate.class); + assertThat(agg.aggregates().size(), is(2)); + + var alias = as(agg.aggregates().get(0), Alias.class); + var aggFunc = as(alias.child(), AggregateFunction.class); + assertThat(aggFunc.filter(), is(Literal.TRUE)); + + alias = as(agg.aggregates().get(1), Alias.class); + aggFunc = as(alias.child(), 
AggregateFunction.class); + assertThat(aggFunc.filter(), is(Literal.TRUE)); + + var filter = as(agg.child(), Filter.class); + assertThat(Expressions.name(filter.condition()), is("emp_no > 1")); + + var source = as(filter.child(), EsRelation.class); + } + + public void testExtractStatsCommonFilterUsingAliases() { + var plan = plan(""" + from test + | eval eno = emp_no + | drop emp_no + | stats min(salary) where eno > 1, + max(salary) where eno > 1 + """); + + var limit = as(plan, Limit.class); + var agg = as(limit.child(), Aggregate.class); + assertThat(agg.aggregates().size(), is(2)); + + var alias = as(agg.aggregates().get(0), Alias.class); + var aggFunc = as(alias.child(), AggregateFunction.class); + assertThat(aggFunc.filter(), is(Literal.TRUE)); + + alias = as(agg.aggregates().get(1), Alias.class); + aggFunc = as(alias.child(), AggregateFunction.class); + assertThat(aggFunc.filter(), is(Literal.TRUE)); + + var filter = as(agg.child(), Filter.class); + assertThat(Expressions.name(filter.condition()), is("eno > 1")); + + var source = as(filter.child(), EsRelation.class); + } + + public void testExtractStatsCommonFilterUsingJustOneAlias() { + var plan = plan(""" + from test + | eval eno = emp_no + | stats min(salary) where emp_no > 1, + max(salary) where eno > 1 + """); + + var limit = as(plan, Limit.class); + var agg = as(limit.child(), Aggregate.class); + assertThat(agg.aggregates().size(), is(2)); + + var alias = as(agg.aggregates().get(0), Alias.class); + var aggFunc = as(alias.child(), AggregateFunction.class); + assertThat(aggFunc.filter(), is(Literal.TRUE)); + + alias = as(agg.aggregates().get(1), Alias.class); + aggFunc = as(alias.child(), AggregateFunction.class); + assertThat(aggFunc.filter(), is(Literal.TRUE)); + + var filter = as(agg.child(), Filter.class); + var gt = as(filter.condition(), GreaterThan.class); + assertThat(Expressions.name(gt.left()), is("emp_no")); + assertTrue(gt.right().foldable()); + assertThat(gt.right().fold(), is(1)); + + var source = as(filter.child(), EsRelation.class); + } + + public void testExtractStatsCommonFilterSkippedNotSameFilter() { + var plan = plan(""" + from test + | stats min(salary) where emp_no > 1, + max(salary) where emp_no > 2 + """); + + var limit = as(plan, Limit.class); + var agg = as(limit.child(), Aggregate.class); + assertThat(agg.aggregates().size(), is(2)); + + var alias = as(agg.aggregates().get(0), Alias.class); + var aggFunc = as(alias.child(), AggregateFunction.class); + assertThat(aggFunc.filter(), instanceOf(BinaryComparison.class)); + + alias = as(agg.aggregates().get(1), Alias.class); + aggFunc = as(alias.child(), AggregateFunction.class); + assertThat(aggFunc.filter(), instanceOf(BinaryComparison.class)); + + var source = as(agg.child(), EsRelation.class); + } + + public void testExtractStatsCommonFilterSkippedOnLackingFilter() { + var plan = plan(""" + from test + | stats min(salary), + max(salary) where emp_no > 2 + """); + + var limit = as(plan, Limit.class); + var agg = as(limit.child(), Aggregate.class); + assertThat(agg.aggregates().size(), is(2)); + + var alias = as(agg.aggregates().get(0), Alias.class); + var aggFunc = as(alias.child(), AggregateFunction.class); + assertThat(aggFunc.filter(), is(Literal.TRUE)); + + alias = as(agg.aggregates().get(1), Alias.class); + aggFunc = as(alias.child(), AggregateFunction.class); + assertThat(aggFunc.filter(), instanceOf(BinaryComparison.class)); + + var source = as(agg.child(), EsRelation.class); + } + + public void testExtractStatsCommonFilterSkippedWithGroups() { + 
var plan = plan(""" + from test + | stats min(salary) where emp_no > 2, + max(salary) where emp_no > 2 by first_name + """); + + var limit = as(plan, Limit.class); + var agg = as(limit.child(), Aggregate.class); + assertThat(agg.aggregates().size(), is(3)); + + var alias = as(agg.aggregates().get(0), Alias.class); + var aggFunc = as(alias.child(), AggregateFunction.class); + assertThat(aggFunc.filter(), instanceOf(BinaryComparison.class)); + + alias = as(agg.aggregates().get(1), Alias.class); + aggFunc = as(alias.child(), AggregateFunction.class); + assertThat(aggFunc.filter(), instanceOf(BinaryComparison.class)); + + var source = as(agg.child(), EsRelation.class); + } + + public void testExtractStatsCommonFilterNormalizeAndCombineWithExistingFilter() { + var plan = plan(""" + from test + | where emp_no > 3 + | stats min(salary) where emp_no > 2, + max(salary) where 2 < emp_no + """); + + var limit = as(plan, Limit.class); + var agg = as(limit.child(), Aggregate.class); + assertThat(agg.aggregates().size(), is(2)); + + var alias = as(agg.aggregates().get(0), Alias.class); + var aggFunc = as(alias.child(), AggregateFunction.class); + assertThat(aggFunc.filter(), is(Literal.TRUE)); + + alias = as(agg.aggregates().get(1), Alias.class); + aggFunc = as(alias.child(), AggregateFunction.class); + assertThat(aggFunc.filter(), is(Literal.TRUE)); + + var filter = as(agg.child(), Filter.class); + assertThat(Expressions.name(filter.condition()), is("emp_no > 3")); + + var source = as(filter.child(), EsRelation.class); + } + + public void testExtractStatsCommonFilterInConjunction() { + var plan = plan(""" + from test + | stats min(salary) where emp_no > 2 and first_name == "John", + max(salary) where emp_no > 1 + 1 and length(last_name) < 19 + """); + + var limit = as(plan, Limit.class); + var agg = as(limit.child(), Aggregate.class); + assertThat(agg.aggregates().size(), is(2)); + + var alias = as(agg.aggregates().get(0), Alias.class); + var aggFunc = as(alias.child(), AggregateFunction.class); + assertThat(Expressions.name(aggFunc.filter()), is("first_name == \"John\"")); + + alias = as(agg.aggregates().get(1), Alias.class); + aggFunc = as(alias.child(), AggregateFunction.class); + assertThat(Expressions.name(aggFunc.filter()), is("length(last_name) < 19")); + + var filter = as(agg.child(), Filter.class); + var gt = as(filter.condition(), GreaterThan.class); // name is "emp_no > 1 + 1" + assertThat(Expressions.name(gt.left()), is("emp_no")); + assertTrue(gt.right().foldable()); + assertThat(gt.right().fold(), is(2)); + + var source = as(filter.child(), EsRelation.class); + } + + public void testExtractStatsCommonFilterInConjunctionWithMultipleCommonConjunctions() { + var plan = plan(""" + from test + | stats min(salary) where emp_no < 10 and first_name == "John" and last_name == "Doe", + max(salary) where emp_no - 1 < 2 + 7 and length(last_name) < 19 and last_name == "Doe" + """); + + var limit = as(plan, Limit.class); + var agg = as(limit.child(), Aggregate.class); + assertThat(agg.aggregates().size(), is(2)); + + var alias = as(agg.aggregates().get(0), Alias.class); + var aggFunc = as(alias.child(), AggregateFunction.class); + assertThat(Expressions.name(aggFunc.filter()), is("first_name == \"John\"")); + + alias = as(agg.aggregates().get(1), Alias.class); + aggFunc = as(alias.child(), AggregateFunction.class); + assertThat(Expressions.name(aggFunc.filter()), is("length(last_name) < 19")); + + var filter = as(agg.child(), Filter.class); + var and = as(filter.condition(), And.class); + + var lt = 
as(and.left(), LessThan.class); + assertThat(Expressions.name(lt.left()), is("emp_no")); + assertTrue(lt.right().foldable()); + assertThat(lt.right().fold(), is(10)); + + var equals = as(and.right(), Equals.class); + assertThat(Expressions.name(equals.left()), is("last_name")); + assertTrue(equals.right().foldable()); + assertThat(equals.right().fold(), is(BytesRefs.toBytesRef("Doe"))); + + var source = as(filter.child(), EsRelation.class); + } + + public void testExtractStatsCommonFilterSkippedDueToDisjunction() { + // same query as in testExtractStatsCommonFilterInConjunction, except for the OR in the filter + var plan = plan(""" + from test + | stats min(salary) where emp_no > 2 OR first_name == "John", + max(salary) where emp_no > 1 + 1 and length(last_name) < 19 + """); + + var limit = as(plan, Limit.class); + var agg = as(limit.child(), Aggregate.class); + assertThat(agg.aggregates().size(), is(2)); + + var alias = as(agg.aggregates().get(0), Alias.class); + var aggFunc = as(alias.child(), AggregateFunction.class); + assertThat(aggFunc.filter(), instanceOf(Or.class)); + + alias = as(agg.aggregates().get(1), Alias.class); + aggFunc = as(alias.child(), AggregateFunction.class); + assertThat(aggFunc.filter(), instanceOf(And.class)); + + var source = as(agg.child(), EsRelation.class); + } + public void testQlComparisonOptimizationsApply() { var plan = plan(""" from test diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/QueryTranslatorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/QueryTranslatorTests.java index cf90cf96fe68..57210fda07f2 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/QueryTranslatorTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/QueryTranslatorTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.planner; import org.elasticsearch.index.IndexMode; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.analysis.Analyzer; @@ -46,7 +47,7 @@ private static Analyzer makeAnalyzer(String mappingFileName) { return new Analyzer( new AnalyzerContext(EsqlTestUtils.TEST_CFG, new EsqlFunctionRegistry(), getIndexResult, new EnrichResolution()), - new Verifier(new Metrics(new EsqlFunctionRegistry())) + new Verifier(new Metrics(new EsqlFunctionRegistry()), new XPackLicenseState(() -> 0L)) ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java index 116df21a33ac..b323efad2b4c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.client.internal.Client; import org.elasticsearch.index.IndexMode; import org.elasticsearch.indices.IndicesExpressionGrouper; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; @@ -102,7 +103,7 @@ public void testFailedMetric() { return null; }).when(esqlClient).execute(eq(EsqlResolveFieldsAction.TYPE), any(), any()); - var planExecutor = new PlanExecutor(indexResolver, 
MeterRegistry.NOOP); + var planExecutor = new PlanExecutor(indexResolver, MeterRegistry.NOOP, new XPackLicenseState(() -> 0L)); var enrichResolver = mockEnrichResolver(); var request = new EsqlQueryRequest(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java index 5e6588d2295f..eda906b14795 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.stats; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.watcher.common.stats.Counters; import org.elasticsearch.xpack.esql.analysis.Verifier; @@ -205,7 +206,7 @@ public void testTwoWhereQuery() { public void testTwoQueriesExecuted() { Metrics metrics = new Metrics(new EsqlFunctionRegistry()); - Verifier verifier = new Verifier(metrics); + Verifier verifier = new Verifier(metrics, new XPackLicenseState(() -> 0L)); esqlWithVerifier(""" from employees | where languages > 2 @@ -252,7 +253,7 @@ public void testTwoQueriesExecuted() { public void testMultipleFunctions() { Metrics metrics = new Metrics(new EsqlFunctionRegistry()); - Verifier verifier = new Verifier(metrics); + Verifier verifier = new Verifier(metrics, new XPackLicenseState(() -> 0L)); esqlWithVerifier(""" from employees | where languages > 2 @@ -526,7 +527,7 @@ private Counters esql(String esql, Verifier v) { Metrics metrics = null; if (v == null) { metrics = new Metrics(new EsqlFunctionRegistry()); - verifier = new Verifier(metrics); + verifier = new Verifier(metrics, new XPackLicenseState(() -> 0L)); } analyzer(verifier).analyze(parser.createStatement(esql)); diff --git a/x-pack/plugin/fleet/qa/rest/build.gradle b/x-pack/plugin/fleet/qa/rest/build.gradle index dec624bc3cc5..0959e883997d 100644 --- a/x-pack/plugin/fleet/qa/rest/build.gradle +++ b/x-pack/plugin/fleet/qa/rest/build.gradle @@ -5,8 +5,6 @@ * 2.0. 
*/ -import org.elasticsearch.gradle.internal.info.BuildParams - apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-test-artifact' @@ -27,7 +25,7 @@ tasks.named('yamlRestTest') { tasks.named('yamlRestCompatTest') { usesDefaultDistribution() } -if (BuildParams.inFipsJvm){ +if (buildParams.inFipsJvm){ // This test cluster is using a BASIC license and FIPS 140 mode is not supported in BASIC tasks.named("yamlRestTest").configure{enabled = false } } diff --git a/x-pack/plugin/identity-provider/build.gradle b/x-pack/plugin/identity-provider/build.gradle index dd085e62efa4..f3b0def7eee9 100644 --- a/x-pack/plugin/identity-provider/build.gradle +++ b/x-pack/plugin/identity-provider/build.gradle @@ -281,7 +281,7 @@ tasks.named("thirdPartyAudit").configure { addQaCheckDependencies(project) -if (BuildParams.inFipsJvm) { +if (buildParams.inFipsJvm) { // We don't support the IDP in FIPS-140 mode, so no need to run tests tasks.named("test").configure { enabled = false } } diff --git a/x-pack/plugin/identity-provider/qa/idp-rest-tests/build.gradle b/x-pack/plugin/identity-provider/qa/idp-rest-tests/build.gradle index 46e705ce2724..b109c0118172 100644 --- a/x-pack/plugin/identity-provider/qa/idp-rest-tests/build.gradle +++ b/x-pack/plugin/identity-provider/qa/idp-rest-tests/build.gradle @@ -48,6 +48,6 @@ testClusters.configureEach { // We don't support the IDP in FIPS-140 mode, so no need to run java rest tests tasks.named("javaRestTest").configure { - BuildParams.withFipsEnabledOnly(it) + buildParams.withFipsEnabledOnly(it) } diff --git a/x-pack/plugin/ilm/qa/multi-cluster/build.gradle b/x-pack/plugin/ilm/qa/multi-cluster/build.gradle index 111496669afe..256225c5ef3b 100644 --- a/x-pack/plugin/ilm/qa/multi-cluster/build.gradle +++ b/x-pack/plugin/ilm/qa/multi-cluster/build.gradle @@ -59,5 +59,5 @@ testClusters.matching{ it.name == 'follow-cluster' }.configureEach { tasks.named("check").configure { dependsOn 'follow-cluster' } // Security is explicitly disabled for follow-cluster and leader-cluster, do not run these in FIPS mode tasks.withType(Test).configureEach { - enabled = BuildParams.inFipsJvm == false + enabled = buildParams.inFipsJvm == false } diff --git a/x-pack/plugin/ilm/qa/multi-node/build.gradle b/x-pack/plugin/ilm/qa/multi-node/build.gradle index 8712af84ac24..d420ac9effdd 100644 --- a/x-pack/plugin/ilm/qa/multi-node/build.gradle +++ b/x-pack/plugin/ilm/qa/multi-node/build.gradle @@ -40,7 +40,7 @@ testClusters.configureEach { setting 'time_series.poll_interval', '10m' } -if (BuildParams.inFipsJvm){ +if (buildParams.inFipsJvm){ // Test clusters run with security disabled tasks.named("javaRestTest").configure{enabled = false } } diff --git a/x-pack/plugin/ilm/src/main/java/module-info.java b/x-pack/plugin/ilm/src/main/java/module-info.java index 591c9786247e..aa24c2d6f333 100644 --- a/x-pack/plugin/ilm/src/main/java/module-info.java +++ b/x-pack/plugin/ilm/src/main/java/module-info.java @@ -18,6 +18,4 @@ provides org.elasticsearch.reservedstate.ReservedClusterStateHandlerProvider with org.elasticsearch.xpack.ilm.ReservedLifecycleStateHandlerProvider; - - provides org.elasticsearch.features.FeatureSpecification with org.elasticsearch.xpack.ilm.IndexLifecycleFeatures; } diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java index f41524480e2d..f830a2821d84 100644 --- 
a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java @@ -146,7 +146,6 @@ public Collection<?> createComponents(PluginServices services) { ILMHistoryTemplateRegistry ilmTemplateRegistry = new ILMHistoryTemplateRegistry( settings, services.clusterService(), - services.featureService(), services.threadPool(), services.client(), services.xContentRegistry() diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleFeatures.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleFeatures.java deleted file mode 100644 index cc78271e2d87..000000000000 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleFeatures.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.ilm; - -import org.elasticsearch.Version; -import org.elasticsearch.features.FeatureSpecification; -import org.elasticsearch.features.NodeFeature; -import org.elasticsearch.xpack.ilm.history.ILMHistoryTemplateRegistry; - -import java.util.Map; - -public class IndexLifecycleFeatures implements FeatureSpecification { - @Override - public Map<NodeFeature, Version> getHistoricalFeatures() { - return Map.of(ILMHistoryTemplateRegistry.MANAGED_BY_DATA_STREAM_LIFECYCLE, Version.V_8_12_0); - } -} diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/history/ILMHistoryTemplateRegistry.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/history/ILMHistoryTemplateRegistry.java index 28c28ef6e4c5..5633033e6faa 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/history/ILMHistoryTemplateRegistry.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/history/ILMHistoryTemplateRegistry.java @@ -8,12 +8,9 @@ package org.elasticsearch.xpack.ilm.history; import org.elasticsearch.client.internal.Client; -import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.features.FeatureService; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.ClientHelper; @@ -40,13 +37,11 @@ public class ILMHistoryTemplateRegistry extends IndexTemplateRegistry { // version 6: manage by data stream lifecycle // version 7: version the index template name so we can upgrade existing deployments public static final int INDEX_TEMPLATE_VERSION = 7; - public static final NodeFeature MANAGED_BY_DATA_STREAM_LIFECYCLE = new NodeFeature("ilm-history-managed-by-dsl"); public static final String ILM_TEMPLATE_VERSION_VARIABLE = "xpack.ilm_history.template.version"; public static final String ILM_TEMPLATE_NAME = "ilm-history-" + INDEX_TEMPLATE_VERSION; public static final String ILM_POLICY_NAME = "ilm-history-ilm-policy"; - private final FeatureService featureService; @Override protected boolean requiresMasterNode() { @@ -58,13 +53,11 @@ protected boolean requiresMasterNode() { public ILMHistoryTemplateRegistry( Settings nodeSettings, ClusterService 
clusterService, - FeatureService featureService, ThreadPool threadPool, Client client, NamedXContentRegistry xContentRegistry ) { super(nodeSettings, clusterService, threadPool, client, xContentRegistry); - this.featureService = featureService; this.ilmHistoryEnabled = LifecycleSettings.LIFECYCLE_HISTORY_INDEX_ENABLED_SETTING.get(nodeSettings); } @@ -104,9 +97,4 @@ protected List<LifecyclePolicy> getLifecyclePolicies() { protected String getOrigin() { return ClientHelper.INDEX_LIFECYCLE_ORIGIN; } - - @Override - protected boolean isClusterReady(ClusterChangedEvent event) { - return featureService.clusterHasFeature(event.state(), MANAGED_BY_DATA_STREAM_LIFECYCLE); - } } diff --git a/x-pack/plugin/ilm/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification b/x-pack/plugin/ilm/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification deleted file mode 100644 index 1bf03ae25edd..000000000000 --- a/x-pack/plugin/ilm/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification +++ /dev/null @@ -1,8 +0,0 @@ -# -# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -# or more contributor license agreements. Licensed under the Elastic License -# 2.0; you may not use this file except in compliance with the Elastic License -# 2.0. -# - -org.elasticsearch.xpack.ilm.IndexLifecycleFeatures diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/history/ILMHistoryStoreTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/history/ILMHistoryStoreTests.java index cbdda089e832..1797f6b10f3c 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/history/ILMHistoryStoreTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/history/ILMHistoryStoreTests.java @@ -31,7 +31,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ClusterServiceUtils; @@ -40,7 +39,6 @@ import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xpack.ilm.IndexLifecycleFeatures; import org.hamcrest.Matchers; import org.junit.After; import org.junit.Before; @@ -80,7 +78,6 @@ public void setup() { ILMHistoryTemplateRegistry registry = new ILMHistoryTemplateRegistry( clusterService.getSettings(), clusterService, - new FeatureService(List.of(new IndexLifecycleFeatures())), threadPool, client, NamedXContentRegistry.EMPTY diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/DefaultEndPointsIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/DefaultEndPointsIT.java index 1fef26989d84..69767ce0b24f 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/DefaultEndPointsIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/DefaultEndPointsIT.java @@ -8,6 +8,9 @@ package org.elasticsearch.xpack.inference; import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseListener; +import 
org.elasticsearch.common.Strings; import org.elasticsearch.inference.TaskType; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalService; @@ -16,9 +19,12 @@ import org.junit.Before; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.concurrent.CountDownLatch; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.oneOf; @@ -47,7 +53,6 @@ public void tearDown() throws Exception { @SuppressWarnings("unchecked") public void testInferDeploysDefaultElser() throws IOException { - assumeTrue("Default config requires a feature flag", DefaultElserFeatureFlag.isEnabled()); var model = getModel(ElasticsearchInternalService.DEFAULT_ELSER_ID); assertDefaultElserConfig(model); @@ -78,7 +83,6 @@ private static void assertDefaultElserConfig(Map<String, Object> modelConfig) { @SuppressWarnings("unchecked") public void testInferDeploysDefaultE5() throws IOException { - assumeTrue("Default config requires a feature flag", DefaultElserFeatureFlag.isEnabled()); var model = getModel(ElasticsearchInternalService.DEFAULT_E5_ID); assertDefaultE5Config(model); @@ -110,4 +114,37 @@ private static void assertDefaultE5Config(Map<String, Object> modelConfig) { Matchers.is(Map.of("enabled", true, "min_number_of_allocations", 0, "max_number_of_allocations", 32)) ); } + + public void testMultipleInferencesTriggeringDownloadAndDeploy() throws InterruptedException { + int numParallelRequests = 4; + var latch = new CountDownLatch(numParallelRequests); + var errors = new ArrayList<Exception>(); + + var listener = new ResponseListener() { + @Override + public void onSuccess(Response response) { + latch.countDown(); + } + + @Override + public void onFailure(Exception exception) { + errors.add(exception); + latch.countDown(); + } + }; + + var inputs = List.of("Hello World", "Goodnight moon"); + var queryParams = Map.of("timeout", "120s"); + for (int i = 0; i < numParallelRequests; i++) { + var request = createInferenceRequest( + Strings.format("_inference/%s", ElasticsearchInternalService.DEFAULT_ELSER_ID), + inputs, + queryParams + ); + client().performRequestAsync(request, listener); + } + + latch.await(); + assertThat(errors.toString(), errors, empty()); + } } diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java index 6790b9bb14c5..4e32ef99d06d 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java @@ -373,12 +373,17 @@ protected Map<String, Object> infer(String modelId, TaskType taskType, List<Str - private Map<String, Object> inferInternal(String endpoint, List<String> input, Map<String, String> queryParameters) throws IOException { + protected Request createInferenceRequest(String endpoint, List<String> input, Map<String, String> queryParameters) { var request = new Request("POST", endpoint); request.setJsonEntity(jsonBody(input)); if (queryParameters.isEmpty() == false) { request.addParameters(queryParameters); } + return request; + } + + private Map<String, Object> inferInternal(String endpoint, List<String> input, Map<String, String> queryParameters) throws IOException { + var 
request = createInferenceRequest(endpoint, input, queryParameters); var response = client().performRequest(request); assertOkOrCreated(response); return entityAsMap(response); diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java index 081c83b1e706..591db6db8495 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java @@ -44,18 +44,18 @@ public void testCRUD() throws IOException { } var getAllModels = getAllModels(); - int numModels = DefaultElserFeatureFlag.isEnabled() ? 11 : 9; + int numModels = 11; assertThat(getAllModels, hasSize(numModels)); var getSparseModels = getModels("_all", TaskType.SPARSE_EMBEDDING); - int numSparseModels = DefaultElserFeatureFlag.isEnabled() ? 6 : 5; + int numSparseModels = 6; assertThat(getSparseModels, hasSize(numSparseModels)); for (var sparseModel : getSparseModels) { assertEquals("sparse_embedding", sparseModel.get("task_type")); } var getDenseModels = getModels("_all", TaskType.TEXT_EMBEDDING); - int numDenseModels = DefaultElserFeatureFlag.isEnabled() ? 5 : 4; + int numDenseModels = 5; assertThat(getDenseModels, hasSize(numDenseModels)); for (var denseModel : getDenseModels) { assertEquals("text_embedding", denseModel.get("task_type")); diff --git a/x-pack/plugin/inference/qa/mixed-cluster/build.gradle b/x-pack/plugin/inference/qa/mixed-cluster/build.gradle index 64edb196397a..c05e71fa1cd5 100644 --- a/x-pack/plugin/inference/qa/mixed-cluster/build.gradle +++ b/x-pack/plugin/inference/qa/mixed-cluster/build.gradle @@ -20,7 +20,7 @@ def supportedVersion = bwcVersion -> { return bwcVersion.onOrAfter(Version.fromString("8.11.0")) && bwcVersion != VersionProperties.elasticsearchVersion } -BuildParams.bwcVersions.withWireCompatible(supportedVersion) { bwcVersion, baseName -> +buildParams.bwcVersions.withWireCompatible(supportedVersion) { bwcVersion, baseName -> def javaRestTest = tasks.register("v${bwcVersion}#javaRestTest", StandaloneRestIntegTestTask) { usesBwcDistribution(bwcVersion) systemProperty("tests.old_cluster_version", bwcVersion) diff --git a/x-pack/plugin/inference/qa/rolling-upgrade/build.gradle b/x-pack/plugin/inference/qa/rolling-upgrade/build.gradle index 5d72fc96d98d..bfaff7c84d9a 100644 --- a/x-pack/plugin/inference/qa/rolling-upgrade/build.gradle +++ b/x-pack/plugin/inference/qa/rolling-upgrade/build.gradle @@ -20,7 +20,7 @@ dependencies { } // Inference API added in 8.11 -BuildParams.bwcVersions.withWireCompatible(v -> v.after("8.11.0")) { bwcVersion, baseName -> +buildParams.bwcVersions.withWireCompatible(v -> v.after("8.11.0")) { bwcVersion, baseName -> tasks.register(bwcTaskName(bwcVersion), StandaloneRestIntegTestTask) { usesBwcDistribution(bwcVersion) systemProperty("tests.old_cluster_version", bwcVersion) diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java index 54d83af8f5d9..3b0fc869c812 100644 --- 
a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.delete.DeleteRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; @@ -30,8 +31,10 @@ import java.util.Collection; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.Locale; import java.util.Map; +import java.util.Set; import static org.elasticsearch.xpack.inference.mapper.SemanticTextFieldTests.randomSemanticTextInput; import static org.hamcrest.Matchers.equalTo; @@ -87,30 +90,38 @@ public void testBulkOperations() throws Exception { int totalBulkReqs = randomIntBetween(2, 100); long totalDocs = 0; + Set<String> ids = new HashSet<>(); for (int bulkReqs = 0; bulkReqs < totalBulkReqs; bulkReqs++) { BulkRequestBuilder bulkReqBuilder = client().prepareBulk(); int totalBulkSize = randomIntBetween(1, 100); for (int bulkSize = 0; bulkSize < totalBulkSize; bulkSize++) { - String id = Long.toString(totalDocs); + if (ids.size() > 0 && rarely(random())) { + String id = randomFrom(ids); + ids.remove(id); + DeleteRequestBuilder request = new DeleteRequestBuilder(client(), INDEX_NAME).setId(id); + bulkReqBuilder.add(request); + continue; + } + String id = Long.toString(totalDocs++); boolean isIndexRequest = randomBoolean(); Map<String, Object> source = new HashMap<>(); source.put("sparse_field", isIndexRequest && rarely() ? null : randomSemanticTextInput()); source.put("dense_field", isIndexRequest && rarely() ? 
null : randomSemanticTextInput()); if (isIndexRequest) { bulkReqBuilder.add(new IndexRequestBuilder(client()).setIndex(INDEX_NAME).setId(id).setSource(source)); - totalDocs++; + ids.add(id); } else { boolean isUpsert = randomBoolean(); UpdateRequestBuilder request = new UpdateRequestBuilder(client()).setIndex(INDEX_NAME).setDoc(source); - if (isUpsert || totalDocs == 0) { + if (isUpsert || ids.size() == 0) { request.setDocAsUpsert(true); - totalDocs++; } else { // Update already existing document - id = Long.toString(randomLongBetween(0, totalDocs - 1)); + id = randomFrom(ids); } request.setId(id); bulkReqBuilder.add(request); + ids.add(id); } } BulkResponse bulkResponse = bulkReqBuilder.get(); @@ -135,7 +146,7 @@ public void testBulkOperations() throws Exception { SearchSourceBuilder sourceBuilder = new SearchSourceBuilder().size(0).trackTotalHits(true); SearchResponse searchResponse = client().search(new SearchRequest(INDEX_NAME).source(sourceBuilder)).get(); try { - assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(totalDocs)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo((long) ids.size())); } finally { searchResponse.decRef(); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/DefaultElserFeatureFlag.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/DefaultElserFeatureFlag.java deleted file mode 100644 index 2a764dabd62a..000000000000 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/DefaultElserFeatureFlag.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.inference; - -import org.elasticsearch.common.util.FeatureFlag; - -public class DefaultElserFeatureFlag { - - private DefaultElserFeatureFlag() {} - - private static final FeatureFlag FEATURE_FLAG = new FeatureFlag("inference_default_elser"); - - public static boolean isEnabled() { - return FEATURE_FLAG.isEnabled(); - } -} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java index 10ffedef14e2..f70e7f367127 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java @@ -14,7 +14,6 @@ import org.elasticsearch.xpack.inference.rank.random.RandomRankRetrieverBuilder; import org.elasticsearch.xpack.inference.rank.textsimilarity.TextSimilarityRankRetrieverBuilder; -import java.util.HashSet; import java.util.Set; /** @@ -24,23 +23,23 @@ public class InferenceFeatures implements FeatureSpecification { @Override public Set<NodeFeature> getFeatures() { - var features = new HashSet<NodeFeature>(); - features.add(TextSimilarityRankRetrieverBuilder.TEXT_SIMILARITY_RERANKER_RETRIEVER_SUPPORTED); - features.add(RandomRankRetrieverBuilder.RANDOM_RERANKER_RETRIEVER_SUPPORTED); - features.add(SemanticTextFieldMapper.SEMANTIC_TEXT_SEARCH_INFERENCE_ID); - features.add(SemanticQueryBuilder.SEMANTIC_TEXT_INNER_HITS); - features.add(TextSimilarityRankRetrieverBuilder.TEXT_SIMILARITY_RERANKER_COMPOSITION_SUPPORTED); - if (DefaultElserFeatureFlag.isEnabled()) { - features.add(SemanticTextFieldMapper.SEMANTIC_TEXT_DEFAULT_ELSER_2); - } - return Set.copyOf(features); + return Set.of( + TextSimilarityRankRetrieverBuilder.TEXT_SIMILARITY_RERANKER_RETRIEVER_SUPPORTED, + RandomRankRetrieverBuilder.RANDOM_RERANKER_RETRIEVER_SUPPORTED, + SemanticTextFieldMapper.SEMANTIC_TEXT_SEARCH_INFERENCE_ID, + SemanticQueryBuilder.SEMANTIC_TEXT_INNER_HITS, + SemanticTextFieldMapper.SEMANTIC_TEXT_DEFAULT_ELSER_2, + TextSimilarityRankRetrieverBuilder.TEXT_SIMILARITY_RERANKER_COMPOSITION_SUPPORTED + ); } @Override public Set<NodeFeature> getTestFeatures() { return Set.of( SemanticTextFieldMapper.SEMANTIC_TEXT_IN_OBJECT_FIELD_FIX, - SemanticTextFieldMapper.SEMANTIC_TEXT_SINGLE_FIELD_UPDATE_FIX + SemanticTextFieldMapper.SEMANTIC_TEXT_SINGLE_FIELD_UPDATE_FIX, + SemanticTextFieldMapper.SEMANTIC_TEXT_DELETE_FIX, + SemanticTextFieldMapper.SEMANTIC_TEXT_ZERO_SIZE_FIX ); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java index 02bddb6076d6..2320cca8295d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java @@ -63,6 +63,7 @@ import org.elasticsearch.xpack.inference.services.elasticsearch.CustomElandInternalServiceSettings; import org.elasticsearch.xpack.inference.services.elasticsearch.CustomElandInternalTextEmbeddingServiceSettings; import org.elasticsearch.xpack.inference.services.elasticsearch.CustomElandRerankTaskSettings; +import org.elasticsearch.xpack.inference.services.elasticsearch.ElasticRerankerServiceSettings; +import 
org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalServiceSettings; import org.elasticsearch.xpack.inference.services.elasticsearch.ElserInternalServiceSettings; import org.elasticsearch.xpack.inference.services.elasticsearch.ElserMlNodeTaskSettings; @@ -415,7 +416,13 @@ private static void addInternalNamedWriteables(List namedWriteables) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index f068caff805a..62405a2e9f7d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -227,10 +227,8 @@ public Collection createComponents(PluginServices services) { // reference correctly var registry = new InferenceServiceRegistry(inferenceServices, factoryContext); registry.init(services.client()); - if (DefaultElserFeatureFlag.isEnabled()) { - for (var service : registry.getServices().values()) { - service.defaultConfigIds().forEach(modelRegistry::addDefaultIds); - } + for (var service : registry.getServices().values()) { + service.defaultConfigIds().forEach(modelRegistry::addDefaultIds); } inferenceServiceRegistry.set(registry); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java index b3bbe3a7df9b..dd59230e575c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java @@ -413,8 +413,8 @@ private void applyInferenceResponses(BulkItemRequest item, FieldInferenceRespons */ private Map> createFieldInferenceRequests(BulkShardRequest bulkShardRequest) { Map> fieldRequestsMap = new LinkedHashMap<>(); - int itemIndex = 0; - for (var item : bulkShardRequest.items()) { + for (int itemIndex = 0; itemIndex < bulkShardRequest.items().length; itemIndex++) { + var item = bulkShardRequest.items()[itemIndex]; if (item.getPrimaryResponse() != null) { // item was already aborted/processed by a filter in the chain upstream (e.g. 
security) continue; @@ -441,6 +441,7 @@ private Map> createFieldInferenceRequests(Bu // ignore delete request continue; } + final Map docMap = indexRequest.sourceAsMap(); for (var entry : fieldInferenceMap.values()) { String field = entry.getName(); @@ -483,7 +484,6 @@ private Map> createFieldInferenceRequests(Bu } } } - itemIndex++; } return fieldRequestsMap; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java index f0cb612c9082..89a54ffe2917 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java @@ -57,7 +57,6 @@ import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; -import org.elasticsearch.xpack.inference.DefaultElserFeatureFlag; import java.io.IOException; import java.util.ArrayList; @@ -70,6 +69,7 @@ import java.util.Set; import java.util.function.Function; +import static org.elasticsearch.search.SearchService.DEFAULT_SIZE; import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.CHUNKED_EMBEDDINGS_FIELD; import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.CHUNKED_TEXT_FIELD; import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.CHUNKS_FIELD; @@ -90,8 +90,9 @@ public class SemanticTextFieldMapper extends FieldMapper implements InferenceFie public static final NodeFeature SEMANTIC_TEXT_SEARCH_INFERENCE_ID = new NodeFeature("semantic_text.search_inference_id"); public static final NodeFeature SEMANTIC_TEXT_DEFAULT_ELSER_2 = new NodeFeature("semantic_text.default_elser_2"); public static final NodeFeature SEMANTIC_TEXT_IN_OBJECT_FIELD_FIX = new NodeFeature("semantic_text.in_object_field_fix"); - public static final NodeFeature SEMANTIC_TEXT_SINGLE_FIELD_UPDATE_FIX = new NodeFeature("semantic_text.single_field_update_fix"); + public static final NodeFeature SEMANTIC_TEXT_DELETE_FIX = new NodeFeature("semantic_text.delete_fix"); + public static final NodeFeature SEMANTIC_TEXT_ZERO_SIZE_FIX = new NodeFeature("semantic_text.zero_size_fix"); public static final String CONTENT_TYPE = "semantic_text"; public static final String DEFAULT_ELSER_2_INFERENCE_ID = DEFAULT_ELSER_ID; @@ -111,16 +112,12 @@ public static class Builder extends FieldMapper.Builder { INFERENCE_ID_FIELD, false, mapper -> ((SemanticTextFieldType) mapper.fieldType()).inferenceId, - DefaultElserFeatureFlag.isEnabled() ? DEFAULT_ELSER_2_INFERENCE_ID : null + DEFAULT_ELSER_2_INFERENCE_ID ).addValidator(v -> { if (Strings.isEmpty(v)) { - // If the default ELSER feature flag is enabled, the only way we get here is if the user explicitly sets the param to an - // empty value. However, if the feature flag is disabled, we can get here if the user didn't set the param. - // Adjust the error message appropriately. - String message = DefaultElserFeatureFlag.isEnabled() - ? 
"[" + INFERENCE_ID_FIELD + "] on mapper [" + leafName() + "] of type [" + CONTENT_TYPE + "] must not be empty" - : "[" + INFERENCE_ID_FIELD + "] on mapper [" + leafName() + "] of type [" + CONTENT_TYPE + "] must be specified"; - throw new IllegalArgumentException(message); + throw new IllegalArgumentException( + "[" + INFERENCE_ID_FIELD + "] on mapper [" + leafName() + "] of type [" + CONTENT_TYPE + "] must not be empty" + ); } }); @@ -512,7 +509,7 @@ public boolean fieldHasValue(FieldInfos fieldInfos) { return fieldInfos.fieldInfo(getEmbeddingsFieldName(name())) != null; } - public QueryBuilder semanticQuery(InferenceResults inferenceResults, float boost, String queryName) { + public QueryBuilder semanticQuery(InferenceResults inferenceResults, Integer requestSize, float boost, String queryName) { String nestedFieldPath = getChunksFieldName(name()); String inferenceResultsFieldName = getEmbeddingsFieldName(name()); QueryBuilder childQueryBuilder; @@ -556,7 +553,13 @@ public QueryBuilder semanticQuery(InferenceResults inferenceResults, float boost ); } - yield new KnnVectorQueryBuilder(inferenceResultsFieldName, inference, null, null, null); + Integer k = requestSize; + if (k != null) { + // Ensure that k is at least the default size so that aggregations work when size is set to 0 in the request + k = Math.max(k, DEFAULT_SIZE); + } + + yield new KnnVectorQueryBuilder(inferenceResultsFieldName, inference, k, null, null); } default -> throw new IllegalStateException( "Field [" diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java index 478f2e6a2186..d648db2fbfdb 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java @@ -170,7 +170,7 @@ private QueryBuilder doRewriteBuildSemanticQuery(SearchExecutionContext searchEx ); } - return semanticTextFieldType.semanticQuery(inferenceResults, boost(), queryName()); + return semanticTextFieldType.semanticQuery(inferenceResults, searchExecutionContext.requestSize(), boost(), queryName()); } else { throw new IllegalArgumentException( "Field [" + fieldName + "] of type [" + fieldType.typeName() + "] does not support " + NAME + " queries" diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestGetInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestGetInferenceModelAction.java index 967ad4b46dcb..83b2a8a0f518 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestGetInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestGetInferenceModelAction.java @@ -15,10 +15,7 @@ import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.inference.action.GetInferenceModelAction; -import org.elasticsearch.xpack.inference.DefaultElserFeatureFlag; -import java.util.Collections; -import java.util.HashSet; import java.util.List; import java.util.Set; @@ -69,11 +66,6 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient @Override public Set supportedCapabilities() { - Set capabilities = new HashSet<>(); - if 
(DefaultElserFeatureFlag.isEnabled()) { - capabilities.add(DEFAULT_ELSER_2_CAPABILITY); - } - - return Collections.unmodifiableSet(capabilities); + return Set.of(DEFAULT_ELSER_2_CAPABILITY); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/BaseElasticsearchInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/BaseElasticsearchInternalService.java index 922b366498c2..f743b94df381 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/BaseElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/BaseElasticsearchInternalService.java @@ -35,7 +35,6 @@ import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfigUpdate; import org.elasticsearch.xpack.core.ml.utils.MlPlatformArchitecturesUtil; -import org.elasticsearch.xpack.inference.DefaultElserFeatureFlag; import org.elasticsearch.xpack.inference.InferencePlugin; import java.io.IOException; @@ -157,6 +156,8 @@ protected void putModel(Model model, ActionListener listener) { putBuiltInModel(e5Model.getServiceSettings().modelId(), listener); } else if (model instanceof ElserInternalModel elserModel) { putBuiltInModel(elserModel.getServiceSettings().modelId(), listener); + } else if (model instanceof ElasticRerankerModel elasticRerankerModel) { + putBuiltInModel(elasticRerankerModel.getServiceSettings().modelId(), listener); } else if (model instanceof CustomElandModel) { logger.info("Custom eland model detected, model must have been already loaded into the cluster with eland."); listener.onResponse(Boolean.TRUE); @@ -296,11 +297,6 @@ protected void maybeStartDeployment( InferModelAction.Request request, ActionListener listener ) { - if (DefaultElserFeatureFlag.isEnabled() == false) { - listener.onFailure(e); - return; - } - if (isDefaultId(model.getInferenceEntityId()) && ExceptionsHelper.unwrapCause(e) instanceof ResourceNotFoundException) { this.start(model, request.getInferenceTimeout(), listener.delegateFailureAndWrap((l, started) -> { client.execute(InferModelAction.INSTANCE, request, listener); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandModel.java index b710b24cbda3..b76de5eeedbf 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandModel.java @@ -7,14 +7,9 @@ package org.elasticsearch.xpack.inference.services.elasticsearch; -import org.elasticsearch.ResourceNotFoundException; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.inference.ChunkingSettings; -import org.elasticsearch.inference.Model; import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.inference.TaskType; -import org.elasticsearch.xpack.core.ml.action.CreateTrainedModelAssignmentAction; -import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; public class CustomElandModel extends ElasticsearchInternalModel { @@ -39,31 +34,10 @@ public CustomElandModel( } @Override - public ActionListener 
getCreateTrainedModelAssignmentActionListener( - Model model, - ActionListener listener - ) { - - return new ActionListener<>() { - @Override - public void onResponse(CreateTrainedModelAssignmentAction.Response response) { - listener.onResponse(Boolean.TRUE); - } - - @Override - public void onFailure(Exception e) { - if (ExceptionsHelper.unwrapCause(e) instanceof ResourceNotFoundException) { - listener.onFailure( - new ResourceNotFoundException( - "Could not start the inference as the custom eland model [{0}] for this platform cannot be found." - + " Custom models need to be loaded into the cluster with eland before they can be started.", - internalServiceSettings.modelId() - ) - ); - return; - } - listener.onFailure(e); - } - }; + protected String modelNotFoundErrorMessage(String modelId) { + return "Could not deploy model [" + + modelId + + "] as the model cannot be found." + + " Custom models need to be loaded into the cluster with Eland before they can be started."; } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticDeployedModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticDeployedModel.java index 724c7a8f0a16..ce6c6258d039 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticDeployedModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticDeployedModel.java @@ -36,6 +36,11 @@ public StartTrainedModelDeploymentAction.Request getStartTrainedModelDeploymentA throw new IllegalStateException("cannot start model that uses an existing deployment"); } + @Override + protected String modelNotFoundErrorMessage(String modelId) { + throw new IllegalStateException("cannot start model [" + modelId + "] that uses an existing deployment"); + } + @Override public ActionListener getCreateTrainedModelAssignmentActionListener( Model model, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticRerankerModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticRerankerModel.java new file mode 100644 index 000000000000..115cc9f05599 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticRerankerModel.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.elasticsearch; + +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.inference.ChunkingSettings; +import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.xpack.core.ml.action.CreateTrainedModelAssignmentAction; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; + +public class ElasticRerankerModel extends ElasticsearchInternalModel { + + public ElasticRerankerModel( + String inferenceEntityId, + TaskType taskType, + String service, + ElasticRerankerServiceSettings serviceSettings, + ChunkingSettings chunkingSettings + ) { + super(inferenceEntityId, taskType, service, serviceSettings, chunkingSettings); + } + + @Override + public ElasticRerankerServiceSettings getServiceSettings() { + return (ElasticRerankerServiceSettings) super.getServiceSettings(); + } + + @Override + public ActionListener getCreateTrainedModelAssignmentActionListener( + Model model, + ActionListener listener + ) { + + return new ActionListener<>() { + @Override + public void onResponse(CreateTrainedModelAssignmentAction.Response response) { + listener.onResponse(Boolean.TRUE); + } + + @Override + public void onFailure(Exception e) { + if (ExceptionsHelper.unwrapCause(e) instanceof ResourceNotFoundException) { + listener.onFailure( + new ResourceNotFoundException("Could not start the Elastic Reranker Endpoint due to [{}]", e, e.getMessage()) + ); + return; + } + listener.onFailure(e); + } + }; + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticRerankerServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticRerankerServiceSettings.java new file mode 100644 index 000000000000..316dc092e03c --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticRerankerServiceSettings.java @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.elasticsearch; + +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsSettings; + +import java.io.IOException; +import java.util.Map; + +public class ElasticRerankerServiceSettings extends ElasticsearchInternalServiceSettings { + + public static final String NAME = "elastic_reranker_service_settings"; + + public ElasticRerankerServiceSettings(ElasticsearchInternalServiceSettings other) { + super(other); + } + + public ElasticRerankerServiceSettings( + Integer numAllocations, + int numThreads, + String modelId, + AdaptiveAllocationsSettings adaptiveAllocationsSettings + ) { + super(numAllocations, numThreads, modelId, adaptiveAllocationsSettings); + } + + public ElasticRerankerServiceSettings(StreamInput in) throws IOException { + super(in); + } + + /** + * Parse the ElasticRerankerServiceSettings from map and validate the setting values. + * + * If required setting are missing or the values are invalid an + * {@link ValidationException} is thrown. 
+ * + * @param map Source map containing the config + * @return The builder + */ + public static Builder fromRequestMap(Map map) { + ValidationException validationException = new ValidationException(); + var baseSettings = ElasticsearchInternalServiceSettings.fromMap(map, validationException); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return baseSettings; + } + + @Override + public String getWriteableName() { + return ElasticRerankerServiceSettings.NAME; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalModel.java index 2405243f302b..aa12bf0c645c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalModel.java @@ -7,6 +7,9 @@ package org.elasticsearch.xpack.inference.services.elasticsearch; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.ResourceAlreadyExistsException; +import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.Strings; import org.elasticsearch.core.TimeValue; @@ -15,8 +18,10 @@ import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.inference.TaskType; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.core.ml.action.CreateTrainedModelAssignmentAction; import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import static org.elasticsearch.xpack.core.ml.inference.assignment.AllocationStatus.State.STARTED; @@ -79,10 +84,38 @@ public StartTrainedModelDeploymentAction.Request getStartTrainedModelDeploymentA return startRequest; } - public abstract ActionListener getCreateTrainedModelAssignmentActionListener( + public ActionListener getCreateTrainedModelAssignmentActionListener( Model model, ActionListener listener - ); + ) { + return new ActionListener<>() { + @Override + public void onResponse(CreateTrainedModelAssignmentAction.Response response) { + listener.onResponse(Boolean.TRUE); + } + + @Override + public void onFailure(Exception e) { + var cause = ExceptionsHelper.unwrapCause(e); + if (cause instanceof ResourceNotFoundException) { + listener.onFailure(new ResourceNotFoundException(modelNotFoundErrorMessage(internalServiceSettings.modelId()))); + return; + } else if (cause instanceof ElasticsearchStatusException statusException) { + if (statusException.status() == RestStatus.CONFLICT + && statusException.getRootCause() instanceof ResourceAlreadyExistsException) { + // Deployment is already started + listener.onResponse(Boolean.TRUE); + } + return; + } + listener.onFailure(e); + } + }; + } + + protected String modelNotFoundErrorMessage(String modelId) { + return "Could not deploy model [" + modelId + "] as the model cannot be found."; + } public boolean usesExistingDeployment() { return internalServiceSettings.getDeploymentId() != null; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java index fe83acc8574a..718aeae979fe 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java @@ -97,6 +97,8 @@ public class ElasticsearchInternalService extends BaseElasticsearchInternalServi MULTILINGUAL_E5_SMALL_MODEL_ID_LINUX_X86 ); + public static final String RERANKER_ID = ".rerank-v1"; + public static final int EMBEDDING_MAX_BATCH_SIZE = 10; public static final String DEFAULT_ELSER_ID = ".elser-2-elasticsearch"; public static final String DEFAULT_E5_ID = ".multilingual-e5-small-elasticsearch"; @@ -223,6 +225,8 @@ public void parseRequestConfig( ) ) ); + } else if (RERANKER_ID.equals(modelId)) { + rerankerCase(inferenceEntityId, taskType, config, serviceSettingsMap, chunkingSettings, modelListener); } else { customElandCase(inferenceEntityId, taskType, serviceSettingsMap, taskSettingsMap, chunkingSettings, modelListener); } @@ -323,6 +327,31 @@ private static CustomElandInternalServiceSettings elandServiceSettings( }; } + private void rerankerCase( + String inferenceEntityId, + TaskType taskType, + Map config, + Map serviceSettingsMap, + ChunkingSettings chunkingSettings, + ActionListener modelListener + ) { + + var esServiceSettingsBuilder = ElasticsearchInternalServiceSettings.fromRequestMap(serviceSettingsMap); + + throwIfNotEmptyMap(config, name()); + throwIfNotEmptyMap(serviceSettingsMap, name()); + + modelListener.onResponse( + new ElasticRerankerModel( + inferenceEntityId, + taskType, + NAME, + new ElasticRerankerServiceSettings(esServiceSettingsBuilder.build()), + chunkingSettings + ) + ); + } + private void e5Case( String inferenceEntityId, TaskType taskType, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModel.java index 8d2f59171a60..2594f18db3fb 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModel.java @@ -7,13 +7,8 @@ package org.elasticsearch.xpack.inference.services.elasticsearch; -import org.elasticsearch.ResourceNotFoundException; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.inference.ChunkingSettings; -import org.elasticsearch.inference.Model; import org.elasticsearch.inference.TaskType; -import org.elasticsearch.xpack.core.ml.action.CreateTrainedModelAssignmentAction; -import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; public class ElserInternalModel extends ElasticsearchInternalModel { @@ -37,31 +32,4 @@ public ElserInternalServiceSettings getServiceSettings() { public ElserMlNodeTaskSettings getTaskSettings() { return (ElserMlNodeTaskSettings) super.getTaskSettings(); } - - @Override - public ActionListener getCreateTrainedModelAssignmentActionListener( - Model model, - ActionListener listener - ) { - return new ActionListener<>() { - @Override - public void onResponse(CreateTrainedModelAssignmentAction.Response response) { - listener.onResponse(Boolean.TRUE); - } - - @Override - public void 
onFailure(Exception e) { - if (ExceptionsHelper.unwrapCause(e) instanceof ResourceNotFoundException) { - listener.onFailure( - new ResourceNotFoundException( - "Could not start the ELSER service as the ELSER model for this platform cannot be found." - + " ELSER needs to be downloaded before it can be started." - ) - ); - return; - } - listener.onFailure(e); - } - }; - } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallModel.java index fee00d04d940..2dcf91140c99 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallModel.java @@ -7,13 +7,8 @@ package org.elasticsearch.xpack.inference.services.elasticsearch; -import org.elasticsearch.ResourceNotFoundException; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.inference.ChunkingSettings; -import org.elasticsearch.inference.Model; import org.elasticsearch.inference.TaskType; -import org.elasticsearch.xpack.core.ml.action.CreateTrainedModelAssignmentAction; -import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; public class MultilingualE5SmallModel extends ElasticsearchInternalModel { @@ -31,34 +26,4 @@ public MultilingualE5SmallModel( public MultilingualE5SmallInternalServiceSettings getServiceSettings() { return (MultilingualE5SmallInternalServiceSettings) super.getServiceSettings(); } - - @Override - public ActionListener getCreateTrainedModelAssignmentActionListener( - Model model, - ActionListener listener - ) { - - return new ActionListener<>() { - @Override - public void onResponse(CreateTrainedModelAssignmentAction.Response response) { - listener.onResponse(Boolean.TRUE); - } - - @Override - public void onFailure(Exception e) { - if (ExceptionsHelper.unwrapCause(e) instanceof ResourceNotFoundException) { - listener.onFailure( - new ResourceNotFoundException( - "Could not start the TextEmbeddingService service as the " - + "Multilingual-E5-Small model for this platform cannot be found." 
- + " Multilingual-E5-Small needs to be downloaded before it can be started" - ) - ); - return; - } - listener.onFailure(e); - } - }; - } - } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java index f444719c730f..6e58226f85f2 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java @@ -61,7 +61,6 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; -import org.elasticsearch.xpack.inference.DefaultElserFeatureFlag; import org.elasticsearch.xpack.inference.InferencePlugin; import org.elasticsearch.xpack.inference.model.TestModel; import org.junit.AssumptionViolatedException; @@ -103,9 +102,6 @@ protected Collection getPlugins() { @Override protected void minimalMapping(XContentBuilder b) throws IOException { b.field("type", "semantic_text"); - if (DefaultElserFeatureFlag.isEnabled() == false) { - b.field("inference_id", "test_model"); - } } @Override @@ -175,9 +171,7 @@ public void testDefaults() throws Exception { DocumentMapper mapper = mapperService.documentMapper(); assertEquals(Strings.toString(fieldMapping), mapper.mappingSource().toString()); assertSemanticTextField(mapperService, fieldName, false); - if (DefaultElserFeatureFlag.isEnabled()) { - assertInferenceEndpoints(mapperService, fieldName, DEFAULT_ELSER_2_INFERENCE_ID, DEFAULT_ELSER_2_INFERENCE_ID); - } + assertInferenceEndpoints(mapperService, fieldName, DEFAULT_ELSER_2_INFERENCE_ID, DEFAULT_ELSER_2_INFERENCE_ID); ParsedDocument doc1 = mapper.parse(source(this::writeField)); List fields = doc1.rootDoc().getFields("field"); @@ -211,15 +205,13 @@ public void testSetInferenceEndpoints() throws IOException { assertSerialization.accept(fieldMapping, mapperService); } { - if (DefaultElserFeatureFlag.isEnabled()) { - final XContentBuilder fieldMapping = fieldMapping( - b -> b.field("type", "semantic_text").field(SEARCH_INFERENCE_ID_FIELD, searchInferenceId) - ); - final MapperService mapperService = createMapperService(fieldMapping); - assertSemanticTextField(mapperService, fieldName, false); - assertInferenceEndpoints(mapperService, fieldName, DEFAULT_ELSER_2_INFERENCE_ID, searchInferenceId); - assertSerialization.accept(fieldMapping, mapperService); - } + final XContentBuilder fieldMapping = fieldMapping( + b -> b.field("type", "semantic_text").field(SEARCH_INFERENCE_ID_FIELD, searchInferenceId) + ); + final MapperService mapperService = createMapperService(fieldMapping); + assertSemanticTextField(mapperService, fieldName, false); + assertInferenceEndpoints(mapperService, fieldName, DEFAULT_ELSER_2_INFERENCE_ID, searchInferenceId); + assertSerialization.accept(fieldMapping, mapperService); } { final XContentBuilder fieldMapping = fieldMapping( @@ -246,26 +238,18 @@ public void testInvalidInferenceEndpoints() { ); } { - final String expectedMessage = DefaultElserFeatureFlag.isEnabled() - ? 
"[inference_id] on mapper [field] of type [semantic_text] must not be empty" - : "[inference_id] on mapper [field] of type [semantic_text] must be specified"; Exception e = expectThrows( MapperParsingException.class, () -> createMapperService(fieldMapping(b -> b.field("type", "semantic_text").field(INFERENCE_ID_FIELD, ""))) ); - assertThat(e.getMessage(), containsString(expectedMessage)); + assertThat(e.getMessage(), containsString("[inference_id] on mapper [field] of type [semantic_text] must not be empty")); } { - if (DefaultElserFeatureFlag.isEnabled()) { - Exception e = expectThrows( - MapperParsingException.class, - () -> createMapperService(fieldMapping(b -> b.field("type", "semantic_text").field(SEARCH_INFERENCE_ID_FIELD, ""))) - ); - assertThat( - e.getMessage(), - containsString("[search_inference_id] on mapper [field] of type [semantic_text] must not be empty") - ); - } + Exception e = expectThrows( + MapperParsingException.class, + () -> createMapperService(fieldMapping(b -> b.field("type", "semantic_text").field(SEARCH_INFERENCE_ID_FIELD, ""))) + ); + assertThat(e.getMessage(), containsString("[search_inference_id] on mapper [field] of type [semantic_text] must not be empty")); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/validation/SimpleServiceIntegrationValidatorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/validation/SimpleServiceIntegrationValidatorTests.java index 767dd4d64a7d..22ef35c3a46d 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/validation/SimpleServiceIntegrationValidatorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/validation/SimpleServiceIntegrationValidatorTests.java @@ -71,8 +71,10 @@ public void testValidate_ServiceThrowsException() { any() ); - assertThrows(ElasticsearchStatusException.class, () -> { - underTest.validate(mockInferenceService, mockModel, mockActionListener);}); + assertThrows( + ElasticsearchStatusException.class, + () -> { underTest.validate(mockInferenceService, mockModel, mockActionListener); } + ); verifyCallToService(false); } diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml index 445df1dc302b..534e4831c4a0 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml @@ -551,7 +551,7 @@ setup: --- "Calculates embeddings using the default ELSER 2 endpoint": - requires: - reason: "default ELSER 2 inference ID is behind a feature flag" + reason: "default ELSER 2 inference ID is enabled via a capability" test_runner_features: [capabilities] capabilities: - method: GET @@ -624,3 +624,55 @@ setup: - match: { _source.level_1.dense_field.text: "another inference test" } - exists: _source.level_1.dense_field.inference.chunks.0.embeddings - match: { _source.level_1.dense_field.inference.chunks.0.text: "another inference test" } + +--- +"Deletes on bulk operation": + - requires: + cluster_features: semantic_text.delete_fix + reason: Delete operations are properly applied when subsequent operations include a semantic text field. 
+ + - do: + bulk: + index: test-index + refresh: true + body: | + {"index":{"_id": "1"}} + {"dense_field": ["you know, for testing", "now with chunks"]} + {"index":{"_id": "2"}} + {"dense_field": ["some more tests", "that include chunks"]} + + - do: + search: + index: test-index + body: + query: + semantic: + field: dense_field + query: "you know, for testing" + + - match: { hits.total.value: 2 } + - match: { hits.total.relation: eq } + - match: { hits.hits.0._source.dense_field.text: ["you know, for testing", "now with chunks"] } + - match: { hits.hits.1._source.dense_field.text: ["some more tests", "that include chunks"] } + + - do: + bulk: + index: test-index + refresh: true + body: | + {"delete":{ "_id": "2"}} + {"update":{"_id": "1"}} + {"doc":{"dense_field": "updated text"}} + + - do: + search: + index: test-index + body: + query: + semantic: + field: dense_field + query: "you know, for testing" + + - match: { hits.total.value: 1 } + - match: { hits.total.relation: eq } + - match: { hits.hits.0._source.dense_field.text: "updated text" } diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml index 17938f3b61a4..c2704a4c2291 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml @@ -843,7 +843,7 @@ setup: --- "Query a field that uses the default ELSER 2 endpoint": - requires: - reason: "default ELSER 2 inference ID is behind a feature flag" + reason: "default ELSER 2 inference ID is enabled via a capability" test_runner_features: [capabilities] capabilities: - method: GET @@ -878,3 +878,117 @@ setup: - match: { hits.total.value: 1 } - match: { hits.hits.0._id: "doc_1" } + +--- +"Query using a sparse embedding model with size set to zero": + - requires: + cluster_features: "semantic_text.zero_size_fix" + reason: zero size fix added in 8.16.1 & 8.15.5 + + - do: + indices.create: + index: test-sparse-index-with-agg-id + body: + mappings: + properties: + inference_field: + type: semantic_text + inference_id: sparse-inference-id + non_inference_field: + type: text + agg_id: + type: keyword + + - do: + index: + index: test-sparse-index-with-agg-id + id: doc_1 + body: + inference_field: "inference test" + agg_id: "doc_1" + + - do: + index: + index: test-sparse-index-with-agg-id + id: doc_2 + body: + non_inference_field: "non-inference test" + agg_id: "doc_2" + refresh: true + + - do: + search: + index: test-sparse-index-with-agg-id + body: + size: 0 + query: + semantic: + field: "inference_field" + query: "inference test" + aggs: + agg_ids: + terms: + field: agg_id + + - match: { hits.total.value: 1 } + - length: { hits.hits: 0 } + - length: { aggregations.agg_ids.buckets: 1 } + - match: { aggregations.agg_ids.buckets.0.key: "doc_1" } + - match: { aggregations.agg_ids.buckets.0.doc_count: 1 } + +--- +"Query using a dense embedding model with size set to zero": + - requires: + cluster_features: "semantic_text.zero_size_fix" + reason: zero size fix added in 8.16.1 & 8.15.5 + + - do: + indices.create: + index: test-dense-index-with-agg-id + body: + mappings: + properties: + inference_field: + type: semantic_text + inference_id: dense-inference-id + non_inference_field: + type: text + agg_id: + type: keyword + + - do: + index: + index: 
test-dense-index-with-agg-id + id: doc_1 + body: + inference_field: "inference test" + agg_id: "doc_1" + + - do: + index: + index: test-dense-index-with-agg-id + id: doc_2 + body: + non_inference_field: "non-inference test" + agg_id: "doc_2" + refresh: true + + - do: + search: + index: test-dense-index-with-agg-id + body: + size: 0 + query: + semantic: + field: "inference_field" + query: "inference test" + aggs: + agg_ids: + terms: + field: agg_id + + - match: { hits.total.value: 1 } + - length: { hits.hits: 0 } + - length: { aggregations.agg_ids.buckets: 1 } + - match: { aggregations.agg_ids.buckets.0.key: "doc_1" } + - match: { aggregations.agg_ids.buckets.0.doc_count: 1 } diff --git a/x-pack/plugin/kql/build.gradle b/x-pack/plugin/kql/build.gradle index 7e4df5654f22..054011a458fe 100644 --- a/x-pack/plugin/kql/build.gradle +++ b/x-pack/plugin/kql/build.gradle @@ -26,14 +26,14 @@ dependencies { testImplementation(testArtifact(project(xpackModule('core')))) } -tasks.named('yamlRestTest') { +tasks.named('yamlRestTest').configure { usesDefaultDistribution() -}.configure { + /**************************************************************** * Enable QA/rest integration tests for snapshot builds only * * TODO: Enable for all builds upon this feature release * ****************************************************************/ - enabled = BuildParams.isSnapshotBuild() + enabled = buildParams.isSnapshotBuild() } /********************************** diff --git a/x-pack/plugin/kql/src/main/antlr/KqlBase.g4 b/x-pack/plugin/kql/src/main/antlr/KqlBase.g4 index da015b699cb1..739fa5eb0c6e 100644 --- a/x-pack/plugin/kql/src/main/antlr/KqlBase.g4 +++ b/x-pack/plugin/kql/src/main/antlr/KqlBase.g4 @@ -46,9 +46,26 @@ notQuery: ; nestedQuery - : fieldName COLON LEFT_CURLY_BRACKET query RIGHT_CURLY_BRACKET + : fieldName COLON LEFT_CURLY_BRACKET nestedSubQuery RIGHT_CURLY_BRACKET ; +nestedSubQuery + : nestedSubQuery operator=(AND|OR) nestedSubQuery #booleanNestedQuery + | nestedSimpleSubQuery #defaultNestedQuery + ; + +nestedSimpleSubQuery + : notQuery + | nestedQuery + | matchAllQuery + | nestedParenthesizedQuery + | existsQuery + | rangeQuery + | fieldQuery; + +nestedParenthesizedQuery + : LEFT_PARENTHESIS nestedSubQuery RIGHT_PARENTHESIS; + matchAllQuery : (WILDCARD COLON)? 
WILDCARD ; diff --git a/x-pack/plugin/kql/src/main/java/module-info.java b/x-pack/plugin/kql/src/main/java/module-info.java index 41e51033b9c7..e3bb6fb99bbd 100644 --- a/x-pack/plugin/kql/src/main/java/module-info.java +++ b/x-pack/plugin/kql/src/main/java/module-info.java @@ -13,6 +13,7 @@ requires org.apache.lucene.queryparser; requires org.elasticsearch.logging; requires org.apache.lucene.core; + requires org.apache.lucene.join; exports org.elasticsearch.xpack.kql; exports org.elasticsearch.xpack.kql.parser; diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlAstBuilder.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlAstBuilder.java index 5fe3a61c0a76..2d810a33190c 100644 --- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlAstBuilder.java +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlAstBuilder.java @@ -9,6 +9,7 @@ import org.antlr.v4.runtime.ParserRuleContext; import org.antlr.v4.runtime.Token; +import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.query.BoolQueryBuilder; @@ -20,6 +21,7 @@ import org.elasticsearch.index.query.QueryStringQueryBuilder; import org.elasticsearch.index.query.RangeQueryBuilder; +import java.util.List; import java.util.Set; import java.util.function.BiConsumer; import java.util.function.BiFunction; @@ -56,15 +58,15 @@ public QueryBuilder toQueryBuilder(ParserRuleContext ctx) { @Override public QueryBuilder visitBooleanQuery(KqlBaseParser.BooleanQueryContext ctx) { assert ctx.operator != null; - return isAndQuery(ctx) ? visitAndBooleanQuery(ctx) : visitOrBooleanQuery(ctx); + return isAndQuery(ctx) ? visitAndBooleanQuery(ctx.query()) : visitOrBooleanQuery(ctx.query()); } - public QueryBuilder visitAndBooleanQuery(KqlBaseParser.BooleanQueryContext ctx) { + public QueryBuilder visitAndBooleanQuery(List clauses) { BoolQueryBuilder builder = QueryBuilders.boolQuery(); // TODO: KQLContext has an option to wrap the clauses into a filter instead of a must clause. Do we need it? 
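        // Illustration of the flattening below (assumed example, not from the
        // change itself): AND is associative, so "a:1 AND b:2 AND c:3", which
        // parses as left-nested boolean contexts, should build one flat query
        //   bool { must: [a:1, b:2, c:3] }
        // rather than the naively recursive
        //   bool { must: [ bool { must: [a:1, b:2] }, c:3 ] }
        // Accepting a list of clauses lets the same merge logic serve both
        // top-level query contexts and the new nestedSubQuery contexts.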
- for (ParserRuleContext subQueryCtx : ctx.query()) { - if (subQueryCtx instanceof KqlBaseParser.BooleanQueryContext booleanSubQueryCtx && isAndQuery(booleanSubQueryCtx)) { + for (ParserRuleContext subQueryCtx : clauses) { + if (isAndQuery(subQueryCtx)) { typedParsing(this, subQueryCtx, BoolQueryBuilder.class).must().forEach(builder::must); } else { builder.must(typedParsing(this, subQueryCtx, QueryBuilder.class)); @@ -74,11 +76,11 @@ public QueryBuilder visitAndBooleanQuery(KqlBaseParser.BooleanQueryContext ctx) return rewriteConjunctionQuery(builder); } - public QueryBuilder visitOrBooleanQuery(KqlBaseParser.BooleanQueryContext ctx) { + public QueryBuilder visitOrBooleanQuery(List clauses) { BoolQueryBuilder builder = QueryBuilders.boolQuery().minimumShouldMatch(1); - for (ParserRuleContext subQueryCtx : ctx.query()) { - if (subQueryCtx instanceof KqlBaseParser.BooleanQueryContext booleanSubQueryCtx && isOrQuery(booleanSubQueryCtx)) { + for (ParserRuleContext subQueryCtx : clauses) { + if (isOrQuery(subQueryCtx)) { typedParsing(this, subQueryCtx, BoolQueryBuilder.class).should().forEach(builder::should); } else { builder.should(typedParsing(this, subQueryCtx, QueryBuilder.class)); @@ -100,8 +102,40 @@ public QueryBuilder visitParenthesizedQuery(KqlBaseParser.ParenthesizedQueryCont @Override public QueryBuilder visitNestedQuery(KqlBaseParser.NestedQueryContext ctx) { - // TODO: implementation - return new MatchNoneQueryBuilder(); + String nestedFieldName = extractText(ctx.fieldName()); + + if (kqlParsingContext.isNestedField(nestedFieldName) == false) { + throw new KqlParsingException( + "[{}] is not a valid nested field name.", + ctx.start.getLine(), + ctx.start.getCharPositionInLine(), + nestedFieldName + ); + } + QueryBuilder subQuery = kqlParsingContext.withNestedPath( + nestedFieldName, + () -> typedParsing(this, ctx.nestedSubQuery(), QueryBuilder.class) + ); + + if (subQuery instanceof MatchNoneQueryBuilder) { + return subQuery; + } + + return wrapWithNestedQuery( + nestedFieldName, + QueryBuilders.nestedQuery(kqlParsingContext.fullFieldName(nestedFieldName), subQuery, ScoreMode.None) + ); + } + + @Override + public QueryBuilder visitBooleanNestedQuery(KqlBaseParser.BooleanNestedQueryContext ctx) { + assert ctx.operator != null; + return isAndQuery(ctx) ? 
visitAndBooleanQuery(ctx.nestedSubQuery()) : visitOrBooleanQuery(ctx.nestedSubQuery()); + } + + @Override + public QueryBuilder visitNestedParenthesizedQuery(KqlBaseParser.NestedParenthesizedQueryContext ctx) { + return typedParsing(this, ctx.nestedSubQuery(), QueryBuilder.class); } @Override @@ -116,7 +150,7 @@ public QueryBuilder visitExistsQuery(KqlBaseParser.ExistsQueryContext ctx) { BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery().minimumShouldMatch(1); withFields(ctx.fieldName(), (fieldName, mappedFieldType) -> { if (isRuntimeField(mappedFieldType) == false) { - boolQueryBuilder.should(QueryBuilders.existsQuery(fieldName)); + boolQueryBuilder.should(wrapWithNestedQuery(fieldName, QueryBuilders.existsQuery(fieldName))); } }); @@ -137,7 +171,7 @@ public QueryBuilder visitRangeQuery(KqlBaseParser.RangeQueryContext ctx) { rangeQuery.timeZone(kqlParsingContext.timeZone().getId()); } - boolQueryBuilder.should(rangeQuery); + boolQueryBuilder.should(wrapWithNestedQuery(fieldName, rangeQuery)); }); return rewriteDisjunctionQuery(boolQueryBuilder); @@ -200,24 +234,33 @@ public QueryBuilder visitFieldQuery(KqlBaseParser.FieldQueryContext ctx) { } if (fieldQuery != null) { - boolQueryBuilder.should(fieldQuery); + boolQueryBuilder.should(wrapWithNestedQuery(fieldName, fieldQuery)); } }); return rewriteDisjunctionQuery(boolQueryBuilder); } - private static boolean isAndQuery(KqlBaseParser.BooleanQueryContext ctx) { - return ctx.operator.getType() == KqlBaseParser.AND; + private static boolean isAndQuery(ParserRuleContext ctx) { + return switch (ctx) { + case KqlBaseParser.BooleanQueryContext booleanQueryCtx -> booleanQueryCtx.operator.getType() == KqlBaseParser.AND; + case KqlBaseParser.BooleanNestedQueryContext booleanNestedCtx -> booleanNestedCtx.operator.getType() == KqlBaseParser.AND; + default -> false; + }; } - private static boolean isOrQuery(KqlBaseParser.BooleanQueryContext ctx) { - return ctx.operator.getType() == KqlBaseParser.OR; + private static boolean isOrQuery(ParserRuleContext ctx) { + return switch (ctx) { + case KqlBaseParser.BooleanQueryContext booleanQueryCtx -> booleanQueryCtx.operator.getType() == KqlBaseParser.OR; + case KqlBaseParser.BooleanNestedQueryContext booleanNestedCtx -> booleanNestedCtx.operator.getType() == KqlBaseParser.OR; + default -> false; + }; } private void withFields(KqlBaseParser.FieldNameContext ctx, BiConsumer fieldConsummer) { assert ctx != null : "Field ctx cannot be null"; String fieldNamePattern = extractText(ctx); + Set fieldNames = kqlParsingContext.resolveFieldNames(fieldNamePattern); if (ctx.value.getType() == KqlBaseParser.QUOTED_STRING && Regex.isSimpleMatchPattern(fieldNamePattern)) { @@ -267,4 +310,14 @@ private BiFunction rangeOperation( default -> throw new IllegalArgumentException(format(null, "Invalid range operator {}\"", operator.getText())); }; } + + private QueryBuilder wrapWithNestedQuery(String fieldName, QueryBuilder query) { + String nestedPath = kqlParsingContext.nestedPath(fieldName); + + if (nestedPath == null || nestedPath.equals(kqlParsingContext.currentNestedPath())) { + return query; + } + + return wrapWithNestedQuery(nestedPath, QueryBuilders.nestedQuery(nestedPath, query, ScoreMode.None)); + } } diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBase.interp b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBase.interp index 7af37d7e3c3b..fbfe52afa4cd 100644 --- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBase.interp +++ 
b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBase.interp @@ -42,6 +42,9 @@ query simpleQuery notQuery nestedQuery +nestedSubQuery +nestedSimpleSubQuery +nestedParenthesizedQuery matchAllQuery parenthesizedQuery rangeQuery @@ -54,4 +57,4 @@ fieldName atn: -[4, 1, 16, 136, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 1, 0, 3, 0, 30, 8, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 40, 8, 1, 10, 1, 12, 1, 43, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 53, 8, 2, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 3, 5, 66, 8, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 4, 8, 79, 8, 8, 11, 8, 12, 8, 80, 1, 8, 3, 8, 84, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 100, 8, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 3, 11, 107, 8, 11, 1, 12, 3, 12, 110, 8, 12, 1, 12, 4, 12, 113, 8, 12, 11, 12, 12, 12, 114, 1, 12, 3, 12, 118, 8, 12, 1, 12, 1, 12, 3, 12, 122, 8, 12, 1, 12, 1, 12, 3, 12, 126, 8, 12, 1, 12, 3, 12, 129, 8, 12, 1, 13, 1, 13, 1, 13, 3, 13, 134, 8, 13, 1, 13, 0, 1, 2, 14, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 0, 4, 1, 0, 2, 3, 1, 0, 6, 9, 2, 0, 14, 14, 16, 16, 1, 0, 2, 4, 145, 0, 29, 1, 0, 0, 0, 2, 33, 1, 0, 0, 0, 4, 52, 1, 0, 0, 0, 6, 54, 1, 0, 0, 0, 8, 57, 1, 0, 0, 0, 10, 65, 1, 0, 0, 0, 12, 69, 1, 0, 0, 0, 14, 73, 1, 0, 0, 0, 16, 83, 1, 0, 0, 0, 18, 85, 1, 0, 0, 0, 20, 99, 1, 0, 0, 0, 22, 106, 1, 0, 0, 0, 24, 128, 1, 0, 0, 0, 26, 133, 1, 0, 0, 0, 28, 30, 3, 2, 1, 0, 29, 28, 1, 0, 0, 0, 29, 30, 1, 0, 0, 0, 30, 31, 1, 0, 0, 0, 31, 32, 5, 0, 0, 1, 32, 1, 1, 0, 0, 0, 33, 34, 6, 1, -1, 0, 34, 35, 3, 4, 2, 0, 35, 41, 1, 0, 0, 0, 36, 37, 10, 2, 0, 0, 37, 38, 7, 0, 0, 0, 38, 40, 3, 2, 1, 2, 39, 36, 1, 0, 0, 0, 40, 43, 1, 0, 0, 0, 41, 39, 1, 0, 0, 0, 41, 42, 1, 0, 0, 0, 42, 3, 1, 0, 0, 0, 43, 41, 1, 0, 0, 0, 44, 53, 3, 6, 3, 0, 45, 53, 3, 8, 4, 0, 46, 53, 3, 12, 6, 0, 47, 53, 3, 10, 5, 0, 48, 53, 3, 18, 9, 0, 49, 53, 3, 14, 7, 0, 50, 53, 3, 20, 10, 0, 51, 53, 3, 22, 11, 0, 52, 44, 1, 0, 0, 0, 52, 45, 1, 0, 0, 0, 52, 46, 1, 0, 0, 0, 52, 47, 1, 0, 0, 0, 52, 48, 1, 0, 0, 0, 52, 49, 1, 0, 0, 0, 52, 50, 1, 0, 0, 0, 52, 51, 1, 0, 0, 0, 53, 5, 1, 0, 0, 0, 54, 55, 5, 4, 0, 0, 55, 56, 3, 4, 2, 0, 56, 7, 1, 0, 0, 0, 57, 58, 3, 26, 13, 0, 58, 59, 5, 5, 0, 0, 59, 60, 5, 12, 0, 0, 60, 61, 3, 2, 1, 0, 61, 62, 5, 13, 0, 0, 62, 9, 1, 0, 0, 0, 63, 64, 5, 16, 0, 0, 64, 66, 5, 5, 0, 0, 65, 63, 1, 0, 0, 0, 65, 66, 1, 0, 0, 0, 66, 67, 1, 0, 0, 0, 67, 68, 5, 16, 0, 0, 68, 11, 1, 0, 0, 0, 69, 70, 5, 10, 0, 0, 70, 71, 3, 2, 1, 0, 71, 72, 5, 11, 0, 0, 72, 13, 1, 0, 0, 0, 73, 74, 3, 26, 13, 0, 74, 75, 7, 1, 0, 0, 75, 76, 3, 16, 8, 0, 76, 15, 1, 0, 0, 0, 77, 79, 7, 2, 0, 0, 78, 77, 1, 0, 0, 0, 79, 80, 1, 0, 0, 0, 80, 78, 1, 0, 0, 0, 80, 81, 1, 0, 0, 0, 81, 84, 1, 0, 0, 0, 82, 84, 5, 15, 0, 0, 83, 78, 1, 0, 0, 0, 83, 82, 1, 0, 0, 0, 84, 17, 1, 0, 0, 0, 85, 86, 3, 26, 13, 0, 86, 87, 5, 5, 0, 0, 87, 88, 5, 16, 0, 0, 88, 19, 1, 0, 0, 0, 89, 90, 3, 26, 13, 0, 90, 91, 5, 5, 0, 0, 91, 92, 3, 24, 12, 0, 92, 100, 1, 0, 0, 0, 93, 94, 3, 26, 13, 0, 94, 95, 5, 5, 0, 0, 95, 96, 5, 10, 0, 0, 96, 97, 3, 24, 12, 0, 97, 98, 5, 11, 0, 0, 98, 100, 1, 0, 0, 0, 99, 89, 1, 0, 0, 0, 99, 93, 1, 0, 0, 0, 100, 21, 1, 0, 0, 0, 101, 107, 3, 24, 12, 0, 102, 103, 5, 10, 0, 0, 103, 104, 3, 24, 12, 0, 104, 105, 5, 11, 0, 0, 105, 107, 1, 0, 0, 0, 106, 101, 1, 0, 
0, 0, 106, 102, 1, 0, 0, 0, 107, 23, 1, 0, 0, 0, 108, 110, 7, 3, 0, 0, 109, 108, 1, 0, 0, 0, 109, 110, 1, 0, 0, 0, 110, 112, 1, 0, 0, 0, 111, 113, 7, 2, 0, 0, 112, 111, 1, 0, 0, 0, 113, 114, 1, 0, 0, 0, 114, 112, 1, 0, 0, 0, 114, 115, 1, 0, 0, 0, 115, 117, 1, 0, 0, 0, 116, 118, 7, 3, 0, 0, 117, 116, 1, 0, 0, 0, 117, 118, 1, 0, 0, 0, 118, 129, 1, 0, 0, 0, 119, 121, 7, 0, 0, 0, 120, 122, 7, 3, 0, 0, 121, 120, 1, 0, 0, 0, 121, 122, 1, 0, 0, 0, 122, 129, 1, 0, 0, 0, 123, 125, 5, 4, 0, 0, 124, 126, 7, 0, 0, 0, 125, 124, 1, 0, 0, 0, 125, 126, 1, 0, 0, 0, 126, 129, 1, 0, 0, 0, 127, 129, 5, 15, 0, 0, 128, 109, 1, 0, 0, 0, 128, 119, 1, 0, 0, 0, 128, 123, 1, 0, 0, 0, 128, 127, 1, 0, 0, 0, 129, 25, 1, 0, 0, 0, 130, 134, 5, 14, 0, 0, 131, 134, 5, 15, 0, 0, 132, 134, 5, 16, 0, 0, 133, 130, 1, 0, 0, 0, 133, 131, 1, 0, 0, 0, 133, 132, 1, 0, 0, 0, 134, 27, 1, 0, 0, 0, 15, 29, 41, 52, 65, 80, 83, 99, 106, 109, 114, 117, 121, 125, 128, 133] \ No newline at end of file +[4, 1, 16, 165, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 1, 0, 3, 0, 36, 8, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 46, 8, 1, 10, 1, 12, 1, 49, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 59, 8, 2, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 76, 8, 5, 10, 5, 12, 5, 79, 9, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 87, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 3, 8, 95, 8, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 4, 11, 108, 8, 11, 11, 11, 12, 11, 109, 1, 11, 3, 11, 113, 8, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 3, 13, 129, 8, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 3, 14, 136, 8, 14, 1, 15, 3, 15, 139, 8, 15, 1, 15, 4, 15, 142, 8, 15, 11, 15, 12, 15, 143, 1, 15, 3, 15, 147, 8, 15, 1, 15, 1, 15, 3, 15, 151, 8, 15, 1, 15, 1, 15, 3, 15, 155, 8, 15, 1, 15, 3, 15, 158, 8, 15, 1, 16, 1, 16, 1, 16, 3, 16, 163, 8, 16, 1, 16, 0, 2, 2, 10, 17, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 0, 4, 1, 0, 2, 3, 1, 0, 6, 9, 2, 0, 14, 14, 16, 16, 1, 0, 2, 4, 177, 0, 35, 1, 0, 0, 0, 2, 39, 1, 0, 0, 0, 4, 58, 1, 0, 0, 0, 6, 60, 1, 0, 0, 0, 8, 63, 1, 0, 0, 0, 10, 69, 1, 0, 0, 0, 12, 86, 1, 0, 0, 0, 14, 88, 1, 0, 0, 0, 16, 94, 1, 0, 0, 0, 18, 98, 1, 0, 0, 0, 20, 102, 1, 0, 0, 0, 22, 112, 1, 0, 0, 0, 24, 114, 1, 0, 0, 0, 26, 128, 1, 0, 0, 0, 28, 135, 1, 0, 0, 0, 30, 157, 1, 0, 0, 0, 32, 162, 1, 0, 0, 0, 34, 36, 3, 2, 1, 0, 35, 34, 1, 0, 0, 0, 35, 36, 1, 0, 0, 0, 36, 37, 1, 0, 0, 0, 37, 38, 5, 0, 0, 1, 38, 1, 1, 0, 0, 0, 39, 40, 6, 1, -1, 0, 40, 41, 3, 4, 2, 0, 41, 47, 1, 0, 0, 0, 42, 43, 10, 2, 0, 0, 43, 44, 7, 0, 0, 0, 44, 46, 3, 2, 1, 2, 45, 42, 1, 0, 0, 0, 46, 49, 1, 0, 0, 0, 47, 45, 1, 0, 0, 0, 47, 48, 1, 0, 0, 0, 48, 3, 1, 0, 0, 0, 49, 47, 1, 0, 0, 0, 50, 59, 3, 6, 3, 0, 51, 59, 3, 8, 4, 0, 52, 59, 3, 18, 9, 0, 53, 59, 3, 16, 8, 0, 54, 59, 3, 24, 12, 0, 55, 59, 3, 20, 10, 0, 56, 59, 3, 26, 13, 0, 57, 59, 3, 28, 14, 0, 58, 50, 1, 0, 0, 0, 58, 51, 1, 0, 0, 0, 58, 52, 1, 0, 0, 0, 58, 53, 1, 0, 0, 0, 58, 54, 1, 0, 0, 0, 58, 55, 1, 0, 0, 0, 58, 56, 1, 0, 0, 0, 58, 57, 1, 0, 0, 0, 59, 5, 1, 0, 0, 0, 60, 61, 5, 4, 0, 0, 61, 62, 3, 4, 2, 0, 62, 7, 1, 0, 0, 0, 63, 64, 3, 32, 16, 0, 64, 65, 5, 5, 0, 0, 65, 66, 5, 12, 0, 0, 66, 67, 3, 10, 5, 0, 67, 68, 5, 13, 0, 0, 68, 9, 1, 0, 0, 0, 69, 70, 6, 5, -1, 
0, 70, 71, 3, 12, 6, 0, 71, 77, 1, 0, 0, 0, 72, 73, 10, 2, 0, 0, 73, 74, 7, 0, 0, 0, 74, 76, 3, 10, 5, 2, 75, 72, 1, 0, 0, 0, 76, 79, 1, 0, 0, 0, 77, 75, 1, 0, 0, 0, 77, 78, 1, 0, 0, 0, 78, 11, 1, 0, 0, 0, 79, 77, 1, 0, 0, 0, 80, 87, 3, 6, 3, 0, 81, 87, 3, 8, 4, 0, 82, 87, 3, 14, 7, 0, 83, 87, 3, 24, 12, 0, 84, 87, 3, 20, 10, 0, 85, 87, 3, 26, 13, 0, 86, 80, 1, 0, 0, 0, 86, 81, 1, 0, 0, 0, 86, 82, 1, 0, 0, 0, 86, 83, 1, 0, 0, 0, 86, 84, 1, 0, 0, 0, 86, 85, 1, 0, 0, 0, 87, 13, 1, 0, 0, 0, 88, 89, 5, 10, 0, 0, 89, 90, 3, 10, 5, 0, 90, 91, 5, 11, 0, 0, 91, 15, 1, 0, 0, 0, 92, 93, 5, 16, 0, 0, 93, 95, 5, 5, 0, 0, 94, 92, 1, 0, 0, 0, 94, 95, 1, 0, 0, 0, 95, 96, 1, 0, 0, 0, 96, 97, 5, 16, 0, 0, 97, 17, 1, 0, 0, 0, 98, 99, 5, 10, 0, 0, 99, 100, 3, 2, 1, 0, 100, 101, 5, 11, 0, 0, 101, 19, 1, 0, 0, 0, 102, 103, 3, 32, 16, 0, 103, 104, 7, 1, 0, 0, 104, 105, 3, 22, 11, 0, 105, 21, 1, 0, 0, 0, 106, 108, 7, 2, 0, 0, 107, 106, 1, 0, 0, 0, 108, 109, 1, 0, 0, 0, 109, 107, 1, 0, 0, 0, 109, 110, 1, 0, 0, 0, 110, 113, 1, 0, 0, 0, 111, 113, 5, 15, 0, 0, 112, 107, 1, 0, 0, 0, 112, 111, 1, 0, 0, 0, 113, 23, 1, 0, 0, 0, 114, 115, 3, 32, 16, 0, 115, 116, 5, 5, 0, 0, 116, 117, 5, 16, 0, 0, 117, 25, 1, 0, 0, 0, 118, 119, 3, 32, 16, 0, 119, 120, 5, 5, 0, 0, 120, 121, 3, 30, 15, 0, 121, 129, 1, 0, 0, 0, 122, 123, 3, 32, 16, 0, 123, 124, 5, 5, 0, 0, 124, 125, 5, 10, 0, 0, 125, 126, 3, 30, 15, 0, 126, 127, 5, 11, 0, 0, 127, 129, 1, 0, 0, 0, 128, 118, 1, 0, 0, 0, 128, 122, 1, 0, 0, 0, 129, 27, 1, 0, 0, 0, 130, 136, 3, 30, 15, 0, 131, 132, 5, 10, 0, 0, 132, 133, 3, 30, 15, 0, 133, 134, 5, 11, 0, 0, 134, 136, 1, 0, 0, 0, 135, 130, 1, 0, 0, 0, 135, 131, 1, 0, 0, 0, 136, 29, 1, 0, 0, 0, 137, 139, 7, 3, 0, 0, 138, 137, 1, 0, 0, 0, 138, 139, 1, 0, 0, 0, 139, 141, 1, 0, 0, 0, 140, 142, 7, 2, 0, 0, 141, 140, 1, 0, 0, 0, 142, 143, 1, 0, 0, 0, 143, 141, 1, 0, 0, 0, 143, 144, 1, 0, 0, 0, 144, 146, 1, 0, 0, 0, 145, 147, 7, 3, 0, 0, 146, 145, 1, 0, 0, 0, 146, 147, 1, 0, 0, 0, 147, 158, 1, 0, 0, 0, 148, 150, 7, 0, 0, 0, 149, 151, 7, 3, 0, 0, 150, 149, 1, 0, 0, 0, 150, 151, 1, 0, 0, 0, 151, 158, 1, 0, 0, 0, 152, 154, 5, 4, 0, 0, 153, 155, 7, 0, 0, 0, 154, 153, 1, 0, 0, 0, 154, 155, 1, 0, 0, 0, 155, 158, 1, 0, 0, 0, 156, 158, 5, 15, 0, 0, 157, 138, 1, 0, 0, 0, 157, 148, 1, 0, 0, 0, 157, 152, 1, 0, 0, 0, 157, 156, 1, 0, 0, 0, 158, 31, 1, 0, 0, 0, 159, 163, 5, 14, 0, 0, 160, 163, 5, 15, 0, 0, 161, 163, 5, 16, 0, 0, 162, 159, 1, 0, 0, 0, 162, 160, 1, 0, 0, 0, 162, 161, 1, 0, 0, 0, 163, 33, 1, 0, 0, 0, 17, 35, 47, 58, 77, 86, 94, 109, 112, 128, 135, 138, 143, 146, 150, 154, 157, 162] \ No newline at end of file diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseListener.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseListener.java index e1015edcd493..c3fc1281b6fd 100644 --- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseListener.java +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseListener.java @@ -92,6 +92,54 @@ class KqlBaseBaseListener implements KqlBaseListener { *

<p>The default implementation does nothing.</p>
*/ @Override public void exitNestedQuery(KqlBaseParser.NestedQueryContext ctx) { } + /** + * {@inheritDoc} + * + *

<p>The default implementation does nothing.</p>
+ */ + @Override public void enterBooleanNestedQuery(KqlBaseParser.BooleanNestedQueryContext ctx) { } + /** + * {@inheritDoc} + * + *

<p>The default implementation does nothing.</p>
+ */ + @Override public void exitBooleanNestedQuery(KqlBaseParser.BooleanNestedQueryContext ctx) { } + /** + * {@inheritDoc} + * + *

<p>The default implementation does nothing.</p>
+ */ + @Override public void enterDefaultNestedQuery(KqlBaseParser.DefaultNestedQueryContext ctx) { } + /** + * {@inheritDoc} + * + *

<p>The default implementation does nothing.</p>
+ */ + @Override public void exitDefaultNestedQuery(KqlBaseParser.DefaultNestedQueryContext ctx) { } + /** + * {@inheritDoc} + * + *

<p>The default implementation does nothing.</p>
+ */ + @Override public void enterNestedSimpleSubQuery(KqlBaseParser.NestedSimpleSubQueryContext ctx) { } + /** + * {@inheritDoc} + * + *

<p>The default implementation does nothing.</p>
+ */ + @Override public void exitNestedSimpleSubQuery(KqlBaseParser.NestedSimpleSubQueryContext ctx) { } + /** + * {@inheritDoc} + * + *

<p>The default implementation does nothing.</p>
+ */ + @Override public void enterNestedParenthesizedQuery(KqlBaseParser.NestedParenthesizedQueryContext ctx) { } + /** + * {@inheritDoc} + * + *

<p>The default implementation does nothing.</p>
+ */ + @Override public void exitNestedParenthesizedQuery(KqlBaseParser.NestedParenthesizedQueryContext ctx) { } /** * {@inheritDoc} * diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseVisitor.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseVisitor.java index 3973a647c8cd..84c882c2e2bc 100644 --- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseVisitor.java +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseVisitor.java @@ -62,6 +62,34 @@ class KqlBaseBaseVisitor extends AbstractParseTreeVisitor implements KqlBa * {@link #visitChildren} on {@code ctx}.

*/ @Override public T visitNestedQuery(KqlBaseParser.NestedQueryContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override public T visitBooleanNestedQuery(KqlBaseParser.BooleanNestedQueryContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override public T visitDefaultNestedQuery(KqlBaseParser.DefaultNestedQueryContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override public T visitNestedSimpleSubQuery(KqlBaseParser.NestedSimpleSubQueryContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override public T visitNestedParenthesizedQuery(KqlBaseParser.NestedParenthesizedQueryContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseListener.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseListener.java index 49f203120864..a44ecf1ecad2 100644 --- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseListener.java +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseListener.java @@ -79,6 +79,50 @@ interface KqlBaseListener extends ParseTreeListener { * @param ctx the parse tree */ void exitNestedQuery(KqlBaseParser.NestedQueryContext ctx); + /** + * Enter a parse tree produced by the {@code booleanNestedQuery} + * labeled alternative in {@link KqlBaseParser#nestedSubQuery}. + * @param ctx the parse tree + */ + void enterBooleanNestedQuery(KqlBaseParser.BooleanNestedQueryContext ctx); + /** + * Exit a parse tree produced by the {@code booleanNestedQuery} + * labeled alternative in {@link KqlBaseParser#nestedSubQuery}. + * @param ctx the parse tree + */ + void exitBooleanNestedQuery(KqlBaseParser.BooleanNestedQueryContext ctx); + /** + * Enter a parse tree produced by the {@code defaultNestedQuery} + * labeled alternative in {@link KqlBaseParser#nestedSubQuery}. + * @param ctx the parse tree + */ + void enterDefaultNestedQuery(KqlBaseParser.DefaultNestedQueryContext ctx); + /** + * Exit a parse tree produced by the {@code defaultNestedQuery} + * labeled alternative in {@link KqlBaseParser#nestedSubQuery}. + * @param ctx the parse tree + */ + void exitDefaultNestedQuery(KqlBaseParser.DefaultNestedQueryContext ctx); + /** + * Enter a parse tree produced by {@link KqlBaseParser#nestedSimpleSubQuery}. + * @param ctx the parse tree + */ + void enterNestedSimpleSubQuery(KqlBaseParser.NestedSimpleSubQueryContext ctx); + /** + * Exit a parse tree produced by {@link KqlBaseParser#nestedSimpleSubQuery}. + * @param ctx the parse tree + */ + void exitNestedSimpleSubQuery(KqlBaseParser.NestedSimpleSubQueryContext ctx); + /** + * Enter a parse tree produced by {@link KqlBaseParser#nestedParenthesizedQuery}. + * @param ctx the parse tree + */ + void enterNestedParenthesizedQuery(KqlBaseParser.NestedParenthesizedQueryContext ctx); + /** + * Exit a parse tree produced by {@link KqlBaseParser#nestedParenthesizedQuery}. + * @param ctx the parse tree + */ + void exitNestedParenthesizedQuery(KqlBaseParser.NestedParenthesizedQueryContext ctx); /** * Enter a parse tree produced by {@link KqlBaseParser#matchAllQuery}. 
* @param ctx the parse tree diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseParser.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseParser.java index 118ac32aadd6..7e797b9edbb9 100644 --- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseParser.java +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseParser.java @@ -30,12 +30,15 @@ class KqlBaseParser extends Parser { RIGHT_CURLY_BRACKET=13, UNQUOTED_LITERAL=14, QUOTED_STRING=15, WILDCARD=16; public static final int RULE_topLevelQuery = 0, RULE_query = 1, RULE_simpleQuery = 2, RULE_notQuery = 3, - RULE_nestedQuery = 4, RULE_matchAllQuery = 5, RULE_parenthesizedQuery = 6, - RULE_rangeQuery = 7, RULE_rangeQueryValue = 8, RULE_existsQuery = 9, RULE_fieldQuery = 10, - RULE_fieldLessQuery = 11, RULE_fieldQueryValue = 12, RULE_fieldName = 13; + RULE_nestedQuery = 4, RULE_nestedSubQuery = 5, RULE_nestedSimpleSubQuery = 6, + RULE_nestedParenthesizedQuery = 7, RULE_matchAllQuery = 8, RULE_parenthesizedQuery = 9, + RULE_rangeQuery = 10, RULE_rangeQueryValue = 11, RULE_existsQuery = 12, + RULE_fieldQuery = 13, RULE_fieldLessQuery = 14, RULE_fieldQueryValue = 15, + RULE_fieldName = 16; private static String[] makeRuleNames() { return new String[] { - "topLevelQuery", "query", "simpleQuery", "notQuery", "nestedQuery", "matchAllQuery", + "topLevelQuery", "query", "simpleQuery", "notQuery", "nestedQuery", "nestedSubQuery", + "nestedSimpleSubQuery", "nestedParenthesizedQuery", "matchAllQuery", "parenthesizedQuery", "rangeQuery", "rangeQueryValue", "existsQuery", "fieldQuery", "fieldLessQuery", "fieldQueryValue", "fieldName" }; @@ -139,17 +142,17 @@ public final TopLevelQueryContext topLevelQuery() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(29); + setState(35); _errHandler.sync(this); _la = _input.LA(1); if ((((_la) & ~0x3f) == 0 && ((1L << _la) & 115740L) != 0)) { { - setState(28); + setState(34); query(0); } } - setState(31); + setState(37); match(EOF); } } @@ -244,11 +247,11 @@ private QueryContext query(int _p) throws RecognitionException { _ctx = _localctx; _prevctx = _localctx; - setState(34); + setState(40); simpleQuery(); } _ctx.stop = _input.LT(-1); - setState(41); + setState(47); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,1,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -259,9 +262,9 @@ private QueryContext query(int _p) throws RecognitionException { { _localctx = new BooleanQueryContext(new QueryContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_query); - setState(36); + setState(42); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(37); + setState(43); ((BooleanQueryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==AND || _la==OR) ) { @@ -272,12 +275,12 @@ private QueryContext query(int _p) throws RecognitionException { _errHandler.reportMatch(this); consume(); } - setState(38); + setState(44); query(2); } } } - setState(43); + setState(49); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,1,_ctx); } @@ -343,62 +346,62 @@ public final SimpleQueryContext simpleQuery() throws RecognitionException { SimpleQueryContext _localctx = new SimpleQueryContext(_ctx, getState()); enterRule(_localctx, 4, RULE_simpleQuery); try { - setState(52); + setState(58); _errHandler.sync(this); switch ( 
getInterpreter().adaptivePredict(_input,2,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(44); + setState(50); notQuery(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(45); + setState(51); nestedQuery(); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(46); + setState(52); parenthesizedQuery(); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(47); + setState(53); matchAllQuery(); } break; case 5: enterOuterAlt(_localctx, 5); { - setState(48); + setState(54); existsQuery(); } break; case 6: enterOuterAlt(_localctx, 6); { - setState(49); + setState(55); rangeQuery(); } break; case 7: enterOuterAlt(_localctx, 7); { - setState(50); + setState(56); fieldQuery(); } break; case 8: enterOuterAlt(_localctx, 8); { - setState(51); + setState(57); fieldLessQuery(); } break; @@ -447,9 +450,9 @@ public final NotQueryContext notQuery() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(54); + setState(60); match(NOT); - setState(55); + setState(61); ((NotQueryContext)_localctx).subQuery = simpleQuery(); } } @@ -471,8 +474,8 @@ public FieldNameContext fieldName() { } public TerminalNode COLON() { return getToken(KqlBaseParser.COLON, 0); } public TerminalNode LEFT_CURLY_BRACKET() { return getToken(KqlBaseParser.LEFT_CURLY_BRACKET, 0); } - public QueryContext query() { - return getRuleContext(QueryContext.class,0); + public NestedSubQueryContext nestedSubQuery() { + return getRuleContext(NestedSubQueryContext.class,0); } public TerminalNode RIGHT_CURLY_BRACKET() { return getToken(KqlBaseParser.RIGHT_CURLY_BRACKET, 0); } public NestedQueryContext(ParserRuleContext parent, int invokingState) { @@ -500,15 +503,15 @@ public final NestedQueryContext nestedQuery() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(57); + setState(63); fieldName(); - setState(58); + setState(64); match(COLON); - setState(59); + setState(65); match(LEFT_CURLY_BRACKET); - setState(60); - query(0); - setState(61); + setState(66); + nestedSubQuery(0); + setState(67); match(RIGHT_CURLY_BRACKET); } } @@ -523,6 +526,288 @@ public final NestedQueryContext nestedQuery() throws RecognitionException { return _localctx; } + @SuppressWarnings("CheckReturnValue") + public static class NestedSubQueryContext extends ParserRuleContext { + public NestedSubQueryContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_nestedSubQuery; } + + public NestedSubQueryContext() { } + public void copyFrom(NestedSubQueryContext ctx) { + super.copyFrom(ctx); + } + } + @SuppressWarnings("CheckReturnValue") + public static class BooleanNestedQueryContext extends NestedSubQueryContext { + public Token operator; + public List nestedSubQuery() { + return getRuleContexts(NestedSubQueryContext.class); + } + public NestedSubQueryContext nestedSubQuery(int i) { + return getRuleContext(NestedSubQueryContext.class,i); + } + public TerminalNode AND() { return getToken(KqlBaseParser.AND, 0); } + public TerminalNode OR() { return getToken(KqlBaseParser.OR, 0); } + public BooleanNestedQueryContext(NestedSubQueryContext ctx) { copyFrom(ctx); } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterBooleanNestedQuery(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitBooleanNestedQuery(this); + } + @Override 
+ public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitBooleanNestedQuery(this); + else return visitor.visitChildren(this); + } + } + @SuppressWarnings("CheckReturnValue") + public static class DefaultNestedQueryContext extends NestedSubQueryContext { + public NestedSimpleSubQueryContext nestedSimpleSubQuery() { + return getRuleContext(NestedSimpleSubQueryContext.class,0); + } + public DefaultNestedQueryContext(NestedSubQueryContext ctx) { copyFrom(ctx); } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterDefaultNestedQuery(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitDefaultNestedQuery(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitDefaultNestedQuery(this); + else return visitor.visitChildren(this); + } + } + + public final NestedSubQueryContext nestedSubQuery() throws RecognitionException { + return nestedSubQuery(0); + } + + private NestedSubQueryContext nestedSubQuery(int _p) throws RecognitionException { + ParserRuleContext _parentctx = _ctx; + int _parentState = getState(); + NestedSubQueryContext _localctx = new NestedSubQueryContext(_ctx, _parentState); + NestedSubQueryContext _prevctx = _localctx; + int _startState = 10; + enterRecursionRule(_localctx, 10, RULE_nestedSubQuery, _p); + int _la; + try { + int _alt; + enterOuterAlt(_localctx, 1); + { + { + _localctx = new DefaultNestedQueryContext(_localctx); + _ctx = _localctx; + _prevctx = _localctx; + + setState(70); + nestedSimpleSubQuery(); + } + _ctx.stop = _input.LT(-1); + setState(77); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,3,_ctx); + while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { + if ( _alt==1 ) { + if ( _parseListeners!=null ) triggerExitRuleEvent(); + _prevctx = _localctx; + { + { + _localctx = new BooleanNestedQueryContext(new NestedSubQueryContext(_parentctx, _parentState)); + pushNewRecursionContext(_localctx, _startState, RULE_nestedSubQuery); + setState(72); + if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); + setState(73); + ((BooleanNestedQueryContext)_localctx).operator = _input.LT(1); + _la = _input.LA(1); + if ( !(_la==AND || _la==OR) ) { + ((BooleanNestedQueryContext)_localctx).operator = (Token)_errHandler.recoverInline(this); + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); + consume(); + } + setState(74); + nestedSubQuery(2); + } + } + } + setState(79); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,3,_ctx); + } + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + unrollRecursionContexts(_parentctx); + } + return _localctx; + } + + @SuppressWarnings("CheckReturnValue") + public static class NestedSimpleSubQueryContext extends ParserRuleContext { + public NotQueryContext notQuery() { + return getRuleContext(NotQueryContext.class,0); + } + public NestedQueryContext nestedQuery() { + return getRuleContext(NestedQueryContext.class,0); + } + public NestedParenthesizedQueryContext nestedParenthesizedQuery() { + return 
getRuleContext(NestedParenthesizedQueryContext.class,0); + } + public ExistsQueryContext existsQuery() { + return getRuleContext(ExistsQueryContext.class,0); + } + public RangeQueryContext rangeQuery() { + return getRuleContext(RangeQueryContext.class,0); + } + public FieldQueryContext fieldQuery() { + return getRuleContext(FieldQueryContext.class,0); + } + public NestedSimpleSubQueryContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_nestedSimpleSubQuery; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterNestedSimpleSubQuery(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitNestedSimpleSubQuery(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitNestedSimpleSubQuery(this); + else return visitor.visitChildren(this); + } + } + + public final NestedSimpleSubQueryContext nestedSimpleSubQuery() throws RecognitionException { + NestedSimpleSubQueryContext _localctx = new NestedSimpleSubQueryContext(_ctx, getState()); + enterRule(_localctx, 12, RULE_nestedSimpleSubQuery); + try { + setState(86); + _errHandler.sync(this); + switch ( getInterpreter().adaptivePredict(_input,4,_ctx) ) { + case 1: + enterOuterAlt(_localctx, 1); + { + setState(80); + notQuery(); + } + break; + case 2: + enterOuterAlt(_localctx, 2); + { + setState(81); + nestedQuery(); + } + break; + case 3: + enterOuterAlt(_localctx, 3); + { + setState(82); + nestedParenthesizedQuery(); + } + break; + case 4: + enterOuterAlt(_localctx, 4); + { + setState(83); + existsQuery(); + } + break; + case 5: + enterOuterAlt(_localctx, 5); + { + setState(84); + rangeQuery(); + } + break; + case 6: + enterOuterAlt(_localctx, 6); + { + setState(85); + fieldQuery(); + } + break; + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + @SuppressWarnings("CheckReturnValue") + public static class NestedParenthesizedQueryContext extends ParserRuleContext { + public TerminalNode LEFT_PARENTHESIS() { return getToken(KqlBaseParser.LEFT_PARENTHESIS, 0); } + public NestedSubQueryContext nestedSubQuery() { + return getRuleContext(NestedSubQueryContext.class,0); + } + public TerminalNode RIGHT_PARENTHESIS() { return getToken(KqlBaseParser.RIGHT_PARENTHESIS, 0); } + public NestedParenthesizedQueryContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_nestedParenthesizedQuery; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterNestedParenthesizedQuery(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitNestedParenthesizedQuery(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor)visitor).visitNestedParenthesizedQuery(this); + else return visitor.visitChildren(this); + } + } + + public final NestedParenthesizedQueryContext nestedParenthesizedQuery() throws RecognitionException { 
+ NestedParenthesizedQueryContext _localctx = new NestedParenthesizedQueryContext(_ctx, getState()); + enterRule(_localctx, 14, RULE_nestedParenthesizedQuery); + try { + enterOuterAlt(_localctx, 1); + { + setState(88); + match(LEFT_PARENTHESIS); + setState(89); + nestedSubQuery(0); + setState(90); + match(RIGHT_PARENTHESIS); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + @SuppressWarnings("CheckReturnValue") public static class MatchAllQueryContext extends ParserRuleContext { public List WILDCARD() { return getTokens(KqlBaseParser.WILDCARD); } @@ -551,23 +836,23 @@ public T accept(ParseTreeVisitor visitor) { public final MatchAllQueryContext matchAllQuery() throws RecognitionException { MatchAllQueryContext _localctx = new MatchAllQueryContext(_ctx, getState()); - enterRule(_localctx, 10, RULE_matchAllQuery); + enterRule(_localctx, 16, RULE_matchAllQuery); try { enterOuterAlt(_localctx, 1); { - setState(65); + setState(94); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,3,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,5,_ctx) ) { case 1: { - setState(63); + setState(92); match(WILDCARD); - setState(64); + setState(93); match(COLON); } break; } - setState(67); + setState(96); match(WILDCARD); } } @@ -610,15 +895,15 @@ public T accept(ParseTreeVisitor visitor) { public final ParenthesizedQueryContext parenthesizedQuery() throws RecognitionException { ParenthesizedQueryContext _localctx = new ParenthesizedQueryContext(_ctx, getState()); - enterRule(_localctx, 12, RULE_parenthesizedQuery); + enterRule(_localctx, 18, RULE_parenthesizedQuery); try { enterOuterAlt(_localctx, 1); { - setState(69); + setState(98); match(LEFT_PARENTHESIS); - setState(70); + setState(99); query(0); - setState(71); + setState(100); match(RIGHT_PARENTHESIS); } } @@ -667,14 +952,14 @@ public T accept(ParseTreeVisitor visitor) { public final RangeQueryContext rangeQuery() throws RecognitionException { RangeQueryContext _localctx = new RangeQueryContext(_ctx, getState()); - enterRule(_localctx, 14, RULE_rangeQuery); + enterRule(_localctx, 20, RULE_rangeQuery); int _la; try { enterOuterAlt(_localctx, 1); { - setState(73); + setState(102); fieldName(); - setState(74); + setState(103); ((RangeQueryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 960L) != 0)) ) { @@ -685,7 +970,7 @@ public final RangeQueryContext rangeQuery() throws RecognitionException { _errHandler.reportMatch(this); consume(); } - setState(75); + setState(104); rangeQueryValue(); } } @@ -732,18 +1017,18 @@ public T accept(ParseTreeVisitor visitor) { public final RangeQueryValueContext rangeQueryValue() throws RecognitionException { RangeQueryValueContext _localctx = new RangeQueryValueContext(_ctx, getState()); - enterRule(_localctx, 16, RULE_rangeQueryValue); + enterRule(_localctx, 22, RULE_rangeQueryValue); int _la; try { int _alt; - setState(83); + setState(112); _errHandler.sync(this); switch (_input.LA(1)) { case UNQUOTED_LITERAL: case WILDCARD: enterOuterAlt(_localctx, 1); { - setState(78); + setState(107); _errHandler.sync(this); _alt = 1; do { @@ -751,7 +1036,7 @@ public final RangeQueryValueContext rangeQueryValue() throws RecognitionExceptio case 1: { { - setState(77); + setState(106); _la = _input.LA(1); if ( !(_la==UNQUOTED_LITERAL || _la==WILDCARD) ) { _errHandler.recoverInline(this); @@ 
-767,16 +1052,16 @@ public final RangeQueryValueContext rangeQueryValue() throws RecognitionExceptio default: throw new NoViableAltException(this); } - setState(80); + setState(109); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,4,_ctx); + _alt = getInterpreter().adaptivePredict(_input,6,_ctx); } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); } break; case QUOTED_STRING: enterOuterAlt(_localctx, 2); { - setState(82); + setState(111); match(QUOTED_STRING); } break; @@ -823,15 +1108,15 @@ public T accept(ParseTreeVisitor visitor) { public final ExistsQueryContext existsQuery() throws RecognitionException { ExistsQueryContext _localctx = new ExistsQueryContext(_ctx, getState()); - enterRule(_localctx, 18, RULE_existsQuery); + enterRule(_localctx, 24, RULE_existsQuery); try { enterOuterAlt(_localctx, 1); { - setState(85); + setState(114); fieldName(); - setState(86); + setState(115); match(COLON); - setState(87); + setState(116); match(WILDCARD); } } @@ -878,34 +1163,34 @@ public T accept(ParseTreeVisitor visitor) { public final FieldQueryContext fieldQuery() throws RecognitionException { FieldQueryContext _localctx = new FieldQueryContext(_ctx, getState()); - enterRule(_localctx, 20, RULE_fieldQuery); + enterRule(_localctx, 26, RULE_fieldQuery); try { - setState(99); + setState(128); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,6,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,8,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(89); + setState(118); fieldName(); - setState(90); + setState(119); match(COLON); - setState(91); + setState(120); fieldQueryValue(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(93); + setState(122); fieldName(); - setState(94); + setState(123); match(COLON); - setState(95); + setState(124); match(LEFT_PARENTHESIS); - setState(96); + setState(125); fieldQueryValue(); - setState(97); + setState(126); match(RIGHT_PARENTHESIS); } break; @@ -950,9 +1235,9 @@ public T accept(ParseTreeVisitor visitor) { public final FieldLessQueryContext fieldLessQuery() throws RecognitionException { FieldLessQueryContext _localctx = new FieldLessQueryContext(_ctx, getState()); - enterRule(_localctx, 22, RULE_fieldLessQuery); + enterRule(_localctx, 28, RULE_fieldLessQuery); try { - setState(106); + setState(135); _errHandler.sync(this); switch (_input.LA(1)) { case AND: @@ -963,18 +1248,18 @@ public final FieldLessQueryContext fieldLessQuery() throws RecognitionException case WILDCARD: enterOuterAlt(_localctx, 1); { - setState(101); + setState(130); fieldQueryValue(); } break; case LEFT_PARENTHESIS: enterOuterAlt(_localctx, 2); { - setState(102); + setState(131); match(LEFT_PARENTHESIS); - setState(103); + setState(132); fieldQueryValue(); - setState(104); + setState(133); match(RIGHT_PARENTHESIS); } break; @@ -1037,22 +1322,22 @@ public T accept(ParseTreeVisitor visitor) { public final FieldQueryValueContext fieldQueryValue() throws RecognitionException { FieldQueryValueContext _localctx = new FieldQueryValueContext(_ctx, getState()); - enterRule(_localctx, 24, RULE_fieldQueryValue); + enterRule(_localctx, 30, RULE_fieldQueryValue); int _la; try { int _alt; - setState(128); + setState(157); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,13,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,15,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(109); + setState(138); _errHandler.sync(this); _la = _input.LA(1); if ((((_la) & 
~0x3f) == 0 && ((1L << _la) & 28L) != 0)) { { - setState(108); + setState(137); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 28L) != 0)) ) { _errHandler.recoverInline(this); @@ -1065,7 +1350,7 @@ public final FieldQueryValueContext fieldQueryValue() throws RecognitionExceptio } } - setState(112); + setState(141); _errHandler.sync(this); _alt = 1; do { @@ -1073,7 +1358,7 @@ public final FieldQueryValueContext fieldQueryValue() throws RecognitionExceptio case 1: { { - setState(111); + setState(140); _la = _input.LA(1); if ( !(_la==UNQUOTED_LITERAL || _la==WILDCARD) ) { _errHandler.recoverInline(this); @@ -1089,16 +1374,16 @@ public final FieldQueryValueContext fieldQueryValue() throws RecognitionExceptio default: throw new NoViableAltException(this); } - setState(114); + setState(143); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,9,_ctx); + _alt = getInterpreter().adaptivePredict(_input,11,_ctx); } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); - setState(117); + setState(146); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,10,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,12,_ctx) ) { case 1: { - setState(116); + setState(145); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 28L) != 0)) ) { _errHandler.recoverInline(this); @@ -1116,7 +1401,7 @@ public final FieldQueryValueContext fieldQueryValue() throws RecognitionExceptio case 2: enterOuterAlt(_localctx, 2); { - setState(119); + setState(148); _la = _input.LA(1); if ( !(_la==AND || _la==OR) ) { _errHandler.recoverInline(this); @@ -1126,12 +1411,12 @@ public final FieldQueryValueContext fieldQueryValue() throws RecognitionExceptio _errHandler.reportMatch(this); consume(); } - setState(121); + setState(150); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,11,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,13,_ctx) ) { case 1: { - setState(120); + setState(149); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 28L) != 0)) ) { _errHandler.recoverInline(this); @@ -1149,14 +1434,14 @@ public final FieldQueryValueContext fieldQueryValue() throws RecognitionExceptio case 3: enterOuterAlt(_localctx, 3); { - setState(123); + setState(152); match(NOT); - setState(125); + setState(154); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,12,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) { case 1: { - setState(124); + setState(153); _la = _input.LA(1); if ( !(_la==AND || _la==OR) ) { _errHandler.recoverInline(this); @@ -1174,7 +1459,7 @@ public final FieldQueryValueContext fieldQueryValue() throws RecognitionExceptio case 4: enterOuterAlt(_localctx, 4); { - setState(127); + setState(156); match(QUOTED_STRING); } break; @@ -1218,29 +1503,29 @@ public T accept(ParseTreeVisitor visitor) { public final FieldNameContext fieldName() throws RecognitionException { FieldNameContext _localctx = new FieldNameContext(_ctx, getState()); - enterRule(_localctx, 26, RULE_fieldName); + enterRule(_localctx, 32, RULE_fieldName); try { - setState(133); + setState(162); _errHandler.sync(this); switch (_input.LA(1)) { case UNQUOTED_LITERAL: enterOuterAlt(_localctx, 1); { - setState(130); + setState(159); ((FieldNameContext)_localctx).value = match(UNQUOTED_LITERAL); } break; case QUOTED_STRING: enterOuterAlt(_localctx, 2); { - setState(131); + setState(160); ((FieldNameContext)_localctx).value = match(QUOTED_STRING); } break; case 
WILDCARD: enterOuterAlt(_localctx, 3); { - setState(132); + setState(161); ((FieldNameContext)_localctx).value = match(WILDCARD); } break; @@ -1263,6 +1548,8 @@ public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { switch (ruleIndex) { case 1: return query_sempred((QueryContext)_localctx, predIndex); + case 5: + return nestedSubQuery_sempred((NestedSubQueryContext)_localctx, predIndex); } return true; } @@ -1273,87 +1560,117 @@ private boolean query_sempred(QueryContext _localctx, int predIndex) { } return true; } + private boolean nestedSubQuery_sempred(NestedSubQueryContext _localctx, int predIndex) { + switch (predIndex) { + case 1: + return precpred(_ctx, 2); + } + return true; + } public static final String _serializedATN = - "\u0004\u0001\u0010\u0088\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001"+ + "\u0004\u0001\u0010\u00a5\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001"+ "\u0002\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004"+ "\u0002\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007"+ "\u0002\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b"+ - "\u0002\f\u0007\f\u0002\r\u0007\r\u0001\u0000\u0003\u0000\u001e\b\u0000"+ - "\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ - "\u0001\u0001\u0001\u0001\u0005\u0001(\b\u0001\n\u0001\f\u0001+\t\u0001"+ - "\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002"+ - "\u0001\u0002\u0001\u0002\u0003\u00025\b\u0002\u0001\u0003\u0001\u0003"+ - "\u0001\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004"+ - "\u0001\u0004\u0001\u0005\u0001\u0005\u0003\u0005B\b\u0005\u0001\u0005"+ - "\u0001\u0005\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0007"+ - "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\b\u0004\bO\b\b\u000b\b\f\b"+ - "P\u0001\b\u0003\bT\b\b\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001\n"+ - "\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0003"+ - "\nd\b\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0003"+ - "\u000bk\b\u000b\u0001\f\u0003\fn\b\f\u0001\f\u0004\fq\b\f\u000b\f\f\f"+ - "r\u0001\f\u0003\fv\b\f\u0001\f\u0001\f\u0003\fz\b\f\u0001\f\u0001\f\u0003"+ - "\f~\b\f\u0001\f\u0003\f\u0081\b\f\u0001\r\u0001\r\u0001\r\u0003\r\u0086"+ - "\b\r\u0001\r\u0000\u0001\u0002\u000e\u0000\u0002\u0004\u0006\b\n\f\u000e"+ - "\u0010\u0012\u0014\u0016\u0018\u001a\u0000\u0004\u0001\u0000\u0002\u0003"+ - "\u0001\u0000\u0006\t\u0002\u0000\u000e\u000e\u0010\u0010\u0001\u0000\u0002"+ - "\u0004\u0091\u0000\u001d\u0001\u0000\u0000\u0000\u0002!\u0001\u0000\u0000"+ - "\u0000\u00044\u0001\u0000\u0000\u0000\u00066\u0001\u0000\u0000\u0000\b"+ - "9\u0001\u0000\u0000\u0000\nA\u0001\u0000\u0000\u0000\fE\u0001\u0000\u0000"+ - "\u0000\u000eI\u0001\u0000\u0000\u0000\u0010S\u0001\u0000\u0000\u0000\u0012"+ - "U\u0001\u0000\u0000\u0000\u0014c\u0001\u0000\u0000\u0000\u0016j\u0001"+ - "\u0000\u0000\u0000\u0018\u0080\u0001\u0000\u0000\u0000\u001a\u0085\u0001"+ - "\u0000\u0000\u0000\u001c\u001e\u0003\u0002\u0001\u0000\u001d\u001c\u0001"+ - "\u0000\u0000\u0000\u001d\u001e\u0001\u0000\u0000\u0000\u001e\u001f\u0001"+ - "\u0000\u0000\u0000\u001f \u0005\u0000\u0000\u0001 \u0001\u0001\u0000\u0000"+ - "\u0000!\"\u0006\u0001\uffff\uffff\u0000\"#\u0003\u0004\u0002\u0000#)\u0001"+ - "\u0000\u0000\u0000$%\n\u0002\u0000\u0000%&\u0007\u0000\u0000\u0000&(\u0003"+ - "\u0002\u0001\u0002\'$\u0001\u0000\u0000\u0000(+\u0001\u0000\u0000\u0000"+ - ")\'\u0001\u0000\u0000\u0000)*\u0001\u0000\u0000\u0000*\u0003\u0001\u0000"+ - 
"\u0000\u0000+)\u0001\u0000\u0000\u0000,5\u0003\u0006\u0003\u0000-5\u0003"+ - "\b\u0004\u0000.5\u0003\f\u0006\u0000/5\u0003\n\u0005\u000005\u0003\u0012"+ - "\t\u000015\u0003\u000e\u0007\u000025\u0003\u0014\n\u000035\u0003\u0016"+ - "\u000b\u00004,\u0001\u0000\u0000\u00004-\u0001\u0000\u0000\u00004.\u0001"+ - "\u0000\u0000\u00004/\u0001\u0000\u0000\u000040\u0001\u0000\u0000\u0000"+ - "41\u0001\u0000\u0000\u000042\u0001\u0000\u0000\u000043\u0001\u0000\u0000"+ - "\u00005\u0005\u0001\u0000\u0000\u000067\u0005\u0004\u0000\u000078\u0003"+ - "\u0004\u0002\u00008\u0007\u0001\u0000\u0000\u00009:\u0003\u001a\r\u0000"+ - ":;\u0005\u0005\u0000\u0000;<\u0005\f\u0000\u0000<=\u0003\u0002\u0001\u0000"+ - "=>\u0005\r\u0000\u0000>\t\u0001\u0000\u0000\u0000?@\u0005\u0010\u0000"+ - "\u0000@B\u0005\u0005\u0000\u0000A?\u0001\u0000\u0000\u0000AB\u0001\u0000"+ - "\u0000\u0000BC\u0001\u0000\u0000\u0000CD\u0005\u0010\u0000\u0000D\u000b"+ - "\u0001\u0000\u0000\u0000EF\u0005\n\u0000\u0000FG\u0003\u0002\u0001\u0000"+ - "GH\u0005\u000b\u0000\u0000H\r\u0001\u0000\u0000\u0000IJ\u0003\u001a\r"+ - "\u0000JK\u0007\u0001\u0000\u0000KL\u0003\u0010\b\u0000L\u000f\u0001\u0000"+ - "\u0000\u0000MO\u0007\u0002\u0000\u0000NM\u0001\u0000\u0000\u0000OP\u0001"+ - "\u0000\u0000\u0000PN\u0001\u0000\u0000\u0000PQ\u0001\u0000\u0000\u0000"+ - "QT\u0001\u0000\u0000\u0000RT\u0005\u000f\u0000\u0000SN\u0001\u0000\u0000"+ - "\u0000SR\u0001\u0000\u0000\u0000T\u0011\u0001\u0000\u0000\u0000UV\u0003"+ - "\u001a\r\u0000VW\u0005\u0005\u0000\u0000WX\u0005\u0010\u0000\u0000X\u0013"+ - "\u0001\u0000\u0000\u0000YZ\u0003\u001a\r\u0000Z[\u0005\u0005\u0000\u0000"+ - "[\\\u0003\u0018\f\u0000\\d\u0001\u0000\u0000\u0000]^\u0003\u001a\r\u0000"+ - "^_\u0005\u0005\u0000\u0000_`\u0005\n\u0000\u0000`a\u0003\u0018\f\u0000"+ - "ab\u0005\u000b\u0000\u0000bd\u0001\u0000\u0000\u0000cY\u0001\u0000\u0000"+ - "\u0000c]\u0001\u0000\u0000\u0000d\u0015\u0001\u0000\u0000\u0000ek\u0003"+ - "\u0018\f\u0000fg\u0005\n\u0000\u0000gh\u0003\u0018\f\u0000hi\u0005\u000b"+ - "\u0000\u0000ik\u0001\u0000\u0000\u0000je\u0001\u0000\u0000\u0000jf\u0001"+ - "\u0000\u0000\u0000k\u0017\u0001\u0000\u0000\u0000ln\u0007\u0003\u0000"+ - "\u0000ml\u0001\u0000\u0000\u0000mn\u0001\u0000\u0000\u0000np\u0001\u0000"+ - "\u0000\u0000oq\u0007\u0002\u0000\u0000po\u0001\u0000\u0000\u0000qr\u0001"+ - "\u0000\u0000\u0000rp\u0001\u0000\u0000\u0000rs\u0001\u0000\u0000\u0000"+ - "su\u0001\u0000\u0000\u0000tv\u0007\u0003\u0000\u0000ut\u0001\u0000\u0000"+ - "\u0000uv\u0001\u0000\u0000\u0000v\u0081\u0001\u0000\u0000\u0000wy\u0007"+ - "\u0000\u0000\u0000xz\u0007\u0003\u0000\u0000yx\u0001\u0000\u0000\u0000"+ - "yz\u0001\u0000\u0000\u0000z\u0081\u0001\u0000\u0000\u0000{}\u0005\u0004"+ - "\u0000\u0000|~\u0007\u0000\u0000\u0000}|\u0001\u0000\u0000\u0000}~\u0001"+ - "\u0000\u0000\u0000~\u0081\u0001\u0000\u0000\u0000\u007f\u0081\u0005\u000f"+ - "\u0000\u0000\u0080m\u0001\u0000\u0000\u0000\u0080w\u0001\u0000\u0000\u0000"+ - "\u0080{\u0001\u0000\u0000\u0000\u0080\u007f\u0001\u0000\u0000\u0000\u0081"+ - "\u0019\u0001\u0000\u0000\u0000\u0082\u0086\u0005\u000e\u0000\u0000\u0083"+ - "\u0086\u0005\u000f\u0000\u0000\u0084\u0086\u0005\u0010\u0000\u0000\u0085"+ - "\u0082\u0001\u0000\u0000\u0000\u0085\u0083\u0001\u0000\u0000\u0000\u0085"+ - "\u0084\u0001\u0000\u0000\u0000\u0086\u001b\u0001\u0000\u0000\u0000\u000f"+ - "\u001d)4APScjmruy}\u0080\u0085"; + "\u0002\f\u0007\f\u0002\r\u0007\r\u0002\u000e\u0007\u000e\u0002\u000f\u0007"+ + "\u000f\u0002\u0010\u0007\u0010\u0001\u0000\u0003\u0000$\b\u0000\u0001"+ + 
"\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ + "\u0001\u0001\u0001\u0005\u0001.\b\u0001\n\u0001\f\u00011\t\u0001\u0001"+ + "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ + "\u0002\u0001\u0002\u0003\u0002;\b\u0002\u0001\u0003\u0001\u0003\u0001"+ + "\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001"+ + "\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0005\u0005L\b\u0005\n\u0005\f\u0005O\t\u0005\u0001\u0006\u0001"+ + "\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006W\b"+ + "\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001\b"+ + "\u0003\b_\b\b\u0001\b\u0001\b\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n"+ + "\u0001\n\u0001\n\u0001\n\u0001\u000b\u0004\u000bl\b\u000b\u000b\u000b"+ + "\f\u000bm\u0001\u000b\u0003\u000bq\b\u000b\u0001\f\u0001\f\u0001\f\u0001"+ + "\f\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001"+ + "\r\u0001\r\u0003\r\u0081\b\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001"+ + "\u000e\u0001\u000e\u0003\u000e\u0088\b\u000e\u0001\u000f\u0003\u000f\u008b"+ + "\b\u000f\u0001\u000f\u0004\u000f\u008e\b\u000f\u000b\u000f\f\u000f\u008f"+ + "\u0001\u000f\u0003\u000f\u0093\b\u000f\u0001\u000f\u0001\u000f\u0003\u000f"+ + "\u0097\b\u000f\u0001\u000f\u0001\u000f\u0003\u000f\u009b\b\u000f\u0001"+ + "\u000f\u0003\u000f\u009e\b\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0003"+ + "\u0010\u00a3\b\u0010\u0001\u0010\u0000\u0002\u0002\n\u0011\u0000\u0002"+ + "\u0004\u0006\b\n\f\u000e\u0010\u0012\u0014\u0016\u0018\u001a\u001c\u001e"+ + " \u0000\u0004\u0001\u0000\u0002\u0003\u0001\u0000\u0006\t\u0002\u0000"+ + "\u000e\u000e\u0010\u0010\u0001\u0000\u0002\u0004\u00b1\u0000#\u0001\u0000"+ + "\u0000\u0000\u0002\'\u0001\u0000\u0000\u0000\u0004:\u0001\u0000\u0000"+ + "\u0000\u0006<\u0001\u0000\u0000\u0000\b?\u0001\u0000\u0000\u0000\nE\u0001"+ + "\u0000\u0000\u0000\fV\u0001\u0000\u0000\u0000\u000eX\u0001\u0000\u0000"+ + "\u0000\u0010^\u0001\u0000\u0000\u0000\u0012b\u0001\u0000\u0000\u0000\u0014"+ + "f\u0001\u0000\u0000\u0000\u0016p\u0001\u0000\u0000\u0000\u0018r\u0001"+ + "\u0000\u0000\u0000\u001a\u0080\u0001\u0000\u0000\u0000\u001c\u0087\u0001"+ + "\u0000\u0000\u0000\u001e\u009d\u0001\u0000\u0000\u0000 \u00a2\u0001\u0000"+ + "\u0000\u0000\"$\u0003\u0002\u0001\u0000#\"\u0001\u0000\u0000\u0000#$\u0001"+ + "\u0000\u0000\u0000$%\u0001\u0000\u0000\u0000%&\u0005\u0000\u0000\u0001"+ + "&\u0001\u0001\u0000\u0000\u0000\'(\u0006\u0001\uffff\uffff\u0000()\u0003"+ + "\u0004\u0002\u0000)/\u0001\u0000\u0000\u0000*+\n\u0002\u0000\u0000+,\u0007"+ + "\u0000\u0000\u0000,.\u0003\u0002\u0001\u0002-*\u0001\u0000\u0000\u0000"+ + ".1\u0001\u0000\u0000\u0000/-\u0001\u0000\u0000\u0000/0\u0001\u0000\u0000"+ + "\u00000\u0003\u0001\u0000\u0000\u00001/\u0001\u0000\u0000\u00002;\u0003"+ + "\u0006\u0003\u00003;\u0003\b\u0004\u00004;\u0003\u0012\t\u00005;\u0003"+ + "\u0010\b\u00006;\u0003\u0018\f\u00007;\u0003\u0014\n\u00008;\u0003\u001a"+ + "\r\u00009;\u0003\u001c\u000e\u0000:2\u0001\u0000\u0000\u0000:3\u0001\u0000"+ + "\u0000\u0000:4\u0001\u0000\u0000\u0000:5\u0001\u0000\u0000\u0000:6\u0001"+ + "\u0000\u0000\u0000:7\u0001\u0000\u0000\u0000:8\u0001\u0000\u0000\u0000"+ + ":9\u0001\u0000\u0000\u0000;\u0005\u0001\u0000\u0000\u0000<=\u0005\u0004"+ + "\u0000\u0000=>\u0003\u0004\u0002\u0000>\u0007\u0001\u0000\u0000\u0000"+ + "?@\u0003 \u0010\u0000@A\u0005\u0005\u0000\u0000AB\u0005\f\u0000\u0000"+ + "BC\u0003\n\u0005\u0000CD\u0005\r\u0000\u0000D\t\u0001\u0000\u0000\u0000"+ + 
"EF\u0006\u0005\uffff\uffff\u0000FG\u0003\f\u0006\u0000GM\u0001\u0000\u0000"+ + "\u0000HI\n\u0002\u0000\u0000IJ\u0007\u0000\u0000\u0000JL\u0003\n\u0005"+ + "\u0002KH\u0001\u0000\u0000\u0000LO\u0001\u0000\u0000\u0000MK\u0001\u0000"+ + "\u0000\u0000MN\u0001\u0000\u0000\u0000N\u000b\u0001\u0000\u0000\u0000"+ + "OM\u0001\u0000\u0000\u0000PW\u0003\u0006\u0003\u0000QW\u0003\b\u0004\u0000"+ + "RW\u0003\u000e\u0007\u0000SW\u0003\u0018\f\u0000TW\u0003\u0014\n\u0000"+ + "UW\u0003\u001a\r\u0000VP\u0001\u0000\u0000\u0000VQ\u0001\u0000\u0000\u0000"+ + "VR\u0001\u0000\u0000\u0000VS\u0001\u0000\u0000\u0000VT\u0001\u0000\u0000"+ + "\u0000VU\u0001\u0000\u0000\u0000W\r\u0001\u0000\u0000\u0000XY\u0005\n"+ + "\u0000\u0000YZ\u0003\n\u0005\u0000Z[\u0005\u000b\u0000\u0000[\u000f\u0001"+ + "\u0000\u0000\u0000\\]\u0005\u0010\u0000\u0000]_\u0005\u0005\u0000\u0000"+ + "^\\\u0001\u0000\u0000\u0000^_\u0001\u0000\u0000\u0000_`\u0001\u0000\u0000"+ + "\u0000`a\u0005\u0010\u0000\u0000a\u0011\u0001\u0000\u0000\u0000bc\u0005"+ + "\n\u0000\u0000cd\u0003\u0002\u0001\u0000de\u0005\u000b\u0000\u0000e\u0013"+ + "\u0001\u0000\u0000\u0000fg\u0003 \u0010\u0000gh\u0007\u0001\u0000\u0000"+ + "hi\u0003\u0016\u000b\u0000i\u0015\u0001\u0000\u0000\u0000jl\u0007\u0002"+ + "\u0000\u0000kj\u0001\u0000\u0000\u0000lm\u0001\u0000\u0000\u0000mk\u0001"+ + "\u0000\u0000\u0000mn\u0001\u0000\u0000\u0000nq\u0001\u0000\u0000\u0000"+ + "oq\u0005\u000f\u0000\u0000pk\u0001\u0000\u0000\u0000po\u0001\u0000\u0000"+ + "\u0000q\u0017\u0001\u0000\u0000\u0000rs\u0003 \u0010\u0000st\u0005\u0005"+ + "\u0000\u0000tu\u0005\u0010\u0000\u0000u\u0019\u0001\u0000\u0000\u0000"+ + "vw\u0003 \u0010\u0000wx\u0005\u0005\u0000\u0000xy\u0003\u001e\u000f\u0000"+ + "y\u0081\u0001\u0000\u0000\u0000z{\u0003 \u0010\u0000{|\u0005\u0005\u0000"+ + "\u0000|}\u0005\n\u0000\u0000}~\u0003\u001e\u000f\u0000~\u007f\u0005\u000b"+ + "\u0000\u0000\u007f\u0081\u0001\u0000\u0000\u0000\u0080v\u0001\u0000\u0000"+ + "\u0000\u0080z\u0001\u0000\u0000\u0000\u0081\u001b\u0001\u0000\u0000\u0000"+ + "\u0082\u0088\u0003\u001e\u000f\u0000\u0083\u0084\u0005\n\u0000\u0000\u0084"+ + "\u0085\u0003\u001e\u000f\u0000\u0085\u0086\u0005\u000b\u0000\u0000\u0086"+ + "\u0088\u0001\u0000\u0000\u0000\u0087\u0082\u0001\u0000\u0000\u0000\u0087"+ + "\u0083\u0001\u0000\u0000\u0000\u0088\u001d\u0001\u0000\u0000\u0000\u0089"+ + "\u008b\u0007\u0003\u0000\u0000\u008a\u0089\u0001\u0000\u0000\u0000\u008a"+ + "\u008b\u0001\u0000\u0000\u0000\u008b\u008d\u0001\u0000\u0000\u0000\u008c"+ + "\u008e\u0007\u0002\u0000\u0000\u008d\u008c\u0001\u0000\u0000\u0000\u008e"+ + "\u008f\u0001\u0000\u0000\u0000\u008f\u008d\u0001\u0000\u0000\u0000\u008f"+ + "\u0090\u0001\u0000\u0000\u0000\u0090\u0092\u0001\u0000\u0000\u0000\u0091"+ + "\u0093\u0007\u0003\u0000\u0000\u0092\u0091\u0001\u0000\u0000\u0000\u0092"+ + "\u0093\u0001\u0000\u0000\u0000\u0093\u009e\u0001\u0000\u0000\u0000\u0094"+ + "\u0096\u0007\u0000\u0000\u0000\u0095\u0097\u0007\u0003\u0000\u0000\u0096"+ + "\u0095\u0001\u0000\u0000\u0000\u0096\u0097\u0001\u0000\u0000\u0000\u0097"+ + "\u009e\u0001\u0000\u0000\u0000\u0098\u009a\u0005\u0004\u0000\u0000\u0099"+ + "\u009b\u0007\u0000\u0000\u0000\u009a\u0099\u0001\u0000\u0000\u0000\u009a"+ + "\u009b\u0001\u0000\u0000\u0000\u009b\u009e\u0001\u0000\u0000\u0000\u009c"+ + "\u009e\u0005\u000f\u0000\u0000\u009d\u008a\u0001\u0000\u0000\u0000\u009d"+ + "\u0094\u0001\u0000\u0000\u0000\u009d\u0098\u0001\u0000\u0000\u0000\u009d"+ + "\u009c\u0001\u0000\u0000\u0000\u009e\u001f\u0001\u0000\u0000\u0000\u009f"+ + 
"\u00a3\u0005\u000e\u0000\u0000\u00a0\u00a3\u0005\u000f\u0000\u0000\u00a1"+ + "\u00a3\u0005\u0010\u0000\u0000\u00a2\u009f\u0001\u0000\u0000\u0000\u00a2"+ + "\u00a0\u0001\u0000\u0000\u0000\u00a2\u00a1\u0001\u0000\u0000\u0000\u00a3"+ + "!\u0001\u0000\u0000\u0000\u0011#/:MV^mp\u0080\u0087\u008a\u008f\u0092"+ + "\u0096\u009a\u009d\u00a2"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseVisitor.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseVisitor.java index 18ef8f389195..8200bfe0da25 100644 --- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseVisitor.java +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseVisitor.java @@ -56,6 +56,32 @@ interface KqlBaseVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitNestedQuery(KqlBaseParser.NestedQueryContext ctx); + /** + * Visit a parse tree produced by the {@code booleanNestedQuery} + * labeled alternative in {@link KqlBaseParser#nestedSubQuery}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitBooleanNestedQuery(KqlBaseParser.BooleanNestedQueryContext ctx); + /** + * Visit a parse tree produced by the {@code defaultNestedQuery} + * labeled alternative in {@link KqlBaseParser#nestedSubQuery}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitDefaultNestedQuery(KqlBaseParser.DefaultNestedQueryContext ctx); + /** + * Visit a parse tree produced by {@link KqlBaseParser#nestedSimpleSubQuery}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitNestedSimpleSubQuery(KqlBaseParser.NestedSimpleSubQueryContext ctx); + /** + * Visit a parse tree produced by {@link KqlBaseParser#nestedParenthesizedQuery}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitNestedParenthesizedQuery(KqlBaseParser.NestedParenthesizedQueryContext ctx); /** * Visit a parse tree produced by {@link KqlBaseParser#matchAllQuery}. 
* @param ctx the parse tree diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlParsingContext.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlParsingContext.java index 5f88080fb3ed..30740833ee40 100644 --- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlParsingContext.java +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlParsingContext.java @@ -11,11 +11,18 @@ import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.NestedLookup; +import org.elasticsearch.index.mapper.NestedObjectMapper; import org.elasticsearch.index.query.QueryRewriteContext; +import org.elasticsearch.index.query.support.NestedScope; import java.time.ZoneId; import java.util.List; +import java.util.Map; import java.util.Set; +import java.util.function.Supplier; + +import static org.elasticsearch.common.Strings.format; public class KqlParsingContext { @@ -32,10 +39,11 @@ public static Builder builder(QueryRewriteContext queryRewriteContext) { return new Builder(queryRewriteContext); } - private QueryRewriteContext queryRewriteContext; + private final QueryRewriteContext queryRewriteContext; private final boolean caseInsensitive; private final ZoneId timeZone; private final String defaultField; + private final NestedScope nestedScope = new NestedScope(); public KqlParsingContext(QueryRewriteContext queryRewriteContext, boolean caseInsensitive, ZoneId timeZone, String defaultField) { this.queryRewriteContext = queryRewriteContext; @@ -56,9 +64,17 @@ public String defaultField() { return defaultField; } + public String nestedPath(String fieldName) { + return nestedLookup().getNestedParent(fieldName); + } + + public boolean isNestedField(String fieldName) { + return nestedMappers().containsKey(fullFieldName(fieldName)); + } + public Set resolveFieldNames(String fieldNamePattern) { assert fieldNamePattern != null && fieldNamePattern.isEmpty() == false : "fieldNamePattern cannot be null or empty"; - return queryRewriteContext.getMatchingFieldNames(fieldNamePattern); + return queryRewriteContext.getMatchingFieldNames(fullFieldName(fieldNamePattern)); } public Set resolveDefaultFieldNames() { @@ -89,6 +105,38 @@ public boolean isSearchableField(String fieldName) { return isSearchableField(fieldName, fieldType(fieldName)); } + public NestedScope nestedScope() { + return nestedScope; + } + + public T withNestedPath(String nestedFieldName, Supplier supplier) { + assert isNestedField(nestedFieldName); + nestedScope.nextLevel(nestedMappers().get(fullFieldName(nestedFieldName))); + T result = supplier.get(); + nestedScope.previousLevel(); + return result; + } + + public String currentNestedPath() { + return nestedScope().getObjectMapper() != null ? 
nestedScope().getObjectMapper().fullPath() : null; + } + + public String fullFieldName(String fieldName) { + if (nestedScope.getObjectMapper() == null) { + return fieldName; + } + + return format("%s.%s", nestedScope.getObjectMapper().fullPath(), fieldName); + } + + private NestedLookup nestedLookup() { + return queryRewriteContext.getMappingLookup().nestedLookup(); + } + + private Map nestedMappers() { + return nestedLookup().getNestedMappers(); + } + public static class Builder { private final QueryRewriteContext queryRewriteContext; private boolean caseInsensitive = true; diff --git a/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/AbstractKqlParserTestCase.java b/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/AbstractKqlParserTestCase.java index 588e60bd4dd7..e6e4e20cfd3c 100644 --- a/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/AbstractKqlParserTestCase.java +++ b/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/AbstractKqlParserTestCase.java @@ -46,11 +46,9 @@ import static org.hamcrest.Matchers.equalTo; public abstract class AbstractKqlParserTestCase extends AbstractBuilderTestCase { - protected static final String SUPPORTED_QUERY_FILE_PATH = "/supported-queries"; protected static final String UNSUPPORTED_QUERY_FILE_PATH = "/unsupported-queries"; protected static final Predicate BOOLEAN_QUERY_FILTER = (q) -> q.matches("(?i)[^{]*[^\\\\]*(NOT|AND|OR)[^}]*"); - protected static final String NESTED_FIELD_NAME = "mapped_nested"; @Override diff --git a/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/KqlNestedFieldQueryTests.java b/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/KqlNestedFieldQueryTests.java new file mode 100644 index 000000000000..5660945fa0db --- /dev/null +++ b/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/KqlNestedFieldQueryTests.java @@ -0,0 +1,297 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.kql.parser; + +import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.MatchQueryBuilder; +import org.elasticsearch.index.query.NestedQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.RangeQueryBuilder; +import org.elasticsearch.index.query.TermQueryBuilder; +import org.elasticsearch.test.ESTestCase; +import org.hamcrest.Matchers; + +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import static org.elasticsearch.common.Strings.format; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; + +public class KqlNestedFieldQueryTests extends AbstractKqlParserTestCase { + public void testInvalidNestedFieldName() { + for (String invalidFieldName : List.of(OBJECT_FIELD_NAME, TEXT_FIELD_NAME, "not_a_field", "mapped_nest*")) { + KqlParsingException e = assertThrows( + KqlParsingException.class, + () -> parseKqlQuery(format("%s : { %s: foo AND %s < 10 } ", invalidFieldName, TEXT_FIELD_NAME, INT_FIELD_NAME)) + ); + assertThat(e.getMessage(), Matchers.containsString(invalidFieldName)); + assertThat(e.getMessage(), Matchers.containsString("is not a valid nested field name")); + } + } + + public void testInlineNestedFieldMatchTextQuery() { + for (String fieldName : List.of(TEXT_FIELD_NAME, INT_FIELD_NAME)) { + { + // Querying a nested text subfield. + String nestedFieldName = format("%s.%s", NESTED_FIELD_NAME, fieldName); + String searchTerms = randomSearchTerms(); + String kqlQueryString = format("%s: %s", nestedFieldName, searchTerms); + + NestedQueryBuilder nestedQuery = asInstanceOf(NestedQueryBuilder.class, parseKqlQuery(kqlQueryString)); + + assertThat(nestedQuery.path(), equalTo(NESTED_FIELD_NAME)); + assertMatchQueryBuilder(nestedQuery.query(), nestedFieldName, searchTerms); + } + + { + // Several levels of nested fields. + String nestedFieldName = format("%s.%s.%s", NESTED_FIELD_NAME, NESTED_FIELD_NAME, fieldName); + String searchTerms = randomSearchTerms(); + String kqlQueryString = format("%s: %s", nestedFieldName, searchTerms); + + NestedQueryBuilder nestedQuery = asInstanceOf(NestedQueryBuilder.class, parseKqlQuery(kqlQueryString)); + assertThat(nestedQuery.path(), equalTo(NESTED_FIELD_NAME)); + + NestedQueryBuilder nestedSubQuery = asInstanceOf(NestedQueryBuilder.class, nestedQuery.query()); + assertThat(nestedSubQuery.path(), equalTo(format("%s.%s", NESTED_FIELD_NAME, NESTED_FIELD_NAME))); + + assertMatchQueryBuilder(nestedSubQuery.query(), nestedFieldName, searchTerms); + } + } + } + + public void testInlineNestedFieldMatchKeywordFieldQuery() { + { + // Querying a nested text subfield. + String nestedFieldName = format("%s.%s", NESTED_FIELD_NAME, KEYWORD_FIELD_NAME); + String searchTerms = randomSearchTerms(); + String kqlQueryString = format("%s: %s", nestedFieldName, searchTerms); + + NestedQueryBuilder nestedQuery = asInstanceOf(NestedQueryBuilder.class, parseKqlQuery(kqlQueryString)); + + assertThat(nestedQuery.path(), equalTo(NESTED_FIELD_NAME)); + assertTermQueryBuilder(nestedQuery.query(), nestedFieldName, searchTerms); + } + + { + // Several levels of nested fields. 
+ String nestedFieldName = format("%s.%s.%s", NESTED_FIELD_NAME, NESTED_FIELD_NAME, KEYWORD_FIELD_NAME); + String searchTerms = randomSearchTerms(); + String kqlQueryString = format("%s: %s", nestedFieldName, searchTerms); + + NestedQueryBuilder nestedQuery = asInstanceOf(NestedQueryBuilder.class, parseKqlQuery(kqlQueryString)); + assertThat(nestedQuery.path(), equalTo(NESTED_FIELD_NAME)); + + NestedQueryBuilder nestedSubQuery = asInstanceOf(NestedQueryBuilder.class, nestedQuery.query()); + assertThat(nestedSubQuery.path(), equalTo(format("%s.%s", NESTED_FIELD_NAME, NESTED_FIELD_NAME))); + + assertTermQueryBuilder(nestedSubQuery.query(), nestedFieldName, searchTerms); + } + } + + public void testInlineNestedFieldRangeQuery() { + { + // Querying a nested text subfield. + String nestedFieldName = format("%s.%s", NESTED_FIELD_NAME, INT_FIELD_NAME); + String operator = randomFrom(">", ">=", "<", "<="); + String kqlQueryString = format("%s %s %s", nestedFieldName, operator, randomDouble()); + + NestedQueryBuilder nestedQuery = asInstanceOf(NestedQueryBuilder.class, parseKqlQuery(kqlQueryString)); + + assertThat(nestedQuery.path(), equalTo(NESTED_FIELD_NAME)); + assertRangeQueryBuilder(nestedQuery.query(), nestedFieldName, rangeQueryBuilder -> {}); + } + + { + // Several levels of nested fields. + String nestedFieldName = format("%s.%s.%s", NESTED_FIELD_NAME, NESTED_FIELD_NAME, INT_FIELD_NAME); + String operator = randomFrom(">", ">=", "<", "<="); + String kqlQueryString = format("%s %s %s", nestedFieldName, operator, randomDouble()); + + NestedQueryBuilder nestedQuery = asInstanceOf(NestedQueryBuilder.class, parseKqlQuery(kqlQueryString)); + assertThat(nestedQuery.path(), equalTo(NESTED_FIELD_NAME)); + + NestedQueryBuilder nestedSubQuery = asInstanceOf(NestedQueryBuilder.class, nestedQuery.query()); + assertThat(nestedSubQuery.path(), equalTo(format("%s.%s", NESTED_FIELD_NAME, NESTED_FIELD_NAME))); + + assertRangeQueryBuilder(nestedSubQuery.query(), nestedFieldName, rangeQueryBuilder -> {}); + } + } + + public void testNestedQuerySyntax() { + // Single word - Keyword & text field + List.of(KEYWORD_FIELD_NAME, TEXT_FIELD_NAME) + .forEach( + fieldName -> assertThat( + parseKqlQuery(format("%s : { %s : %s }", NESTED_FIELD_NAME, fieldName, "foo")), + equalTo(parseKqlQuery(format("%s.%s : %s", NESTED_FIELD_NAME, fieldName, "foo"))) + ) + ); + + // Multiple words - Keyword & text field + List.of(KEYWORD_FIELD_NAME, TEXT_FIELD_NAME) + .forEach( + fieldName -> assertThat( + parseKqlQuery(format("%s : { %s : %s }", NESTED_FIELD_NAME, fieldName, "foo bar")), + equalTo(parseKqlQuery(format("%s.%s : %s", NESTED_FIELD_NAME, fieldName, "foo bar"))) + ) + ); + + // Range syntax + { + String operator = randomFrom("<", "<=", ">", ">="); + double rangeValue = randomDouble(); + assertThat( + parseKqlQuery(format("%s : { %s %s %s }", NESTED_FIELD_NAME, INT_FIELD_NAME, operator, rangeValue)), + equalTo(parseKqlQuery(format("%s.%s %s %s", NESTED_FIELD_NAME, INT_FIELD_NAME, operator, rangeValue))) + ); + } + + // Several level of nesting + { + QueryBuilder inlineQuery = parseKqlQuery( + format("%s.%s.%s : %s", NESTED_FIELD_NAME, NESTED_FIELD_NAME, TEXT_FIELD_NAME, "foo bar") + ); + + assertThat( + parseKqlQuery(format("%s : { %s : { %s : %s } }", NESTED_FIELD_NAME, NESTED_FIELD_NAME, TEXT_FIELD_NAME, "foo bar")), + equalTo(inlineQuery) + ); + + assertThat( + parseKqlQuery(format("%s.%s : { %s : %s }", NESTED_FIELD_NAME, NESTED_FIELD_NAME, TEXT_FIELD_NAME, "foo bar")), + equalTo(inlineQuery) + ); + + assertThat( + 
parseKqlQuery(format("%s : { %s.%s : %s }", NESTED_FIELD_NAME, NESTED_FIELD_NAME, TEXT_FIELD_NAME, "foo bar")), + equalTo(inlineQuery) + ); + } + } + + public void testBooleanAndNestedQuerySyntax() { + NestedQueryBuilder nestedQuery = asInstanceOf( + NestedQueryBuilder.class, + parseKqlQuery( + format("%s: { %s : foo AND %s: bar AND %s > 3}", NESTED_FIELD_NAME, TEXT_FIELD_NAME, KEYWORD_FIELD_NAME, INT_FIELD_NAME) + ) + ); + assertThat(nestedQuery.path(), equalTo(NESTED_FIELD_NAME)); + + BoolQueryBuilder subQuery = asInstanceOf(BoolQueryBuilder.class, nestedQuery.query()); + assertThat(subQuery.should(), empty()); + assertThat(subQuery.filter(), empty()); + assertThat(subQuery.mustNot(), empty()); + assertThat(subQuery.must(), hasSize(3)); + assertMatchQueryBuilder( + subQuery.must().stream().filter(q -> q instanceof MatchQueryBuilder).findFirst().get(), + format("%s.%s", NESTED_FIELD_NAME, TEXT_FIELD_NAME), + "foo" + ); + assertTermQueryBuilder( + subQuery.must().stream().filter(q -> q instanceof TermQueryBuilder).findFirst().get(), + format("%s.%s", NESTED_FIELD_NAME, KEYWORD_FIELD_NAME), + "bar" + ); + assertRangeQueryBuilder( + subQuery.must().stream().filter(q -> q instanceof RangeQueryBuilder).findAny().get(), + format("%s.%s", NESTED_FIELD_NAME, INT_FIELD_NAME), + q -> {} + ); + } + + public void testBooleanOrNestedQuerySyntax() { + NestedQueryBuilder nestedQuery = asInstanceOf( + NestedQueryBuilder.class, + parseKqlQuery( + format("%s: { %s : foo OR %s: bar OR %s > 3 }", NESTED_FIELD_NAME, TEXT_FIELD_NAME, KEYWORD_FIELD_NAME, INT_FIELD_NAME) + ) + ); + + assertThat(nestedQuery.path(), equalTo(NESTED_FIELD_NAME)); + + BoolQueryBuilder subQuery = asInstanceOf(BoolQueryBuilder.class, nestedQuery.query()); + assertThat(subQuery.must(), empty()); + assertThat(subQuery.filter(), empty()); + assertThat(subQuery.mustNot(), empty()); + assertThat(subQuery.should(), hasSize(3)); + assertMatchQueryBuilder( + subQuery.should().stream().filter(q -> q instanceof MatchQueryBuilder).findFirst().get(), + format("%s.%s", NESTED_FIELD_NAME, TEXT_FIELD_NAME), + "foo" + ); + assertTermQueryBuilder( + subQuery.should().stream().filter(q -> q instanceof TermQueryBuilder).findFirst().get(), + format("%s.%s", NESTED_FIELD_NAME, KEYWORD_FIELD_NAME), + "bar" + ); + assertRangeQueryBuilder( + subQuery.should().stream().filter(q -> q instanceof RangeQueryBuilder).findAny().get(), + format("%s.%s", NESTED_FIELD_NAME, INT_FIELD_NAME), + q -> {} + ); + } + + public void testBooleanNotNestedQuerySyntax() { + { + NestedQueryBuilder nestedQuery = asInstanceOf( + NestedQueryBuilder.class, + parseKqlQuery(format("%s: { NOT %s : foo }", NESTED_FIELD_NAME, TEXT_FIELD_NAME)) + ); + + assertThat(nestedQuery.path(), equalTo(NESTED_FIELD_NAME)); + + BoolQueryBuilder subQuery = asInstanceOf(BoolQueryBuilder.class, nestedQuery.query()); + assertThat(subQuery.must(), empty()); + assertThat(subQuery.filter(), empty()); + assertThat(subQuery.should(), empty()); + assertThat(subQuery.mustNot(), hasSize(1)); + assertMatchQueryBuilder(subQuery.mustNot().get(0), format("%s.%s", NESTED_FIELD_NAME, TEXT_FIELD_NAME), "foo"); + } + + { + NestedQueryBuilder nestedQuery = asInstanceOf( + NestedQueryBuilder.class, + parseKqlQuery(format("%s: { NOT %s : foo }", NESTED_FIELD_NAME, KEYWORD_FIELD_NAME)) + ); + + assertThat(nestedQuery.path(), equalTo(NESTED_FIELD_NAME)); + + BoolQueryBuilder subQuery = asInstanceOf(BoolQueryBuilder.class, nestedQuery.query()); + assertThat(subQuery.must(), empty()); + assertThat(subQuery.filter(), empty()); + 
assertThat(subQuery.should(), empty()); + assertThat(subQuery.mustNot(), hasSize(1)); + assertTermQueryBuilder(subQuery.mustNot().get(0), format("%s.%s", NESTED_FIELD_NAME, KEYWORD_FIELD_NAME), "foo"); + } + + { + NestedQueryBuilder nestedQuery = asInstanceOf( + NestedQueryBuilder.class, + parseKqlQuery(format("%s: { NOT %s < 3 }", NESTED_FIELD_NAME, INT_FIELD_NAME)) + ); + + assertThat(nestedQuery.path(), equalTo(NESTED_FIELD_NAME)); + + BoolQueryBuilder subQuery = asInstanceOf(BoolQueryBuilder.class, nestedQuery.query()); + assertThat(subQuery.must(), empty()); + assertThat(subQuery.filter(), empty()); + assertThat(subQuery.should(), empty()); + assertThat(subQuery.mustNot(), hasSize(1)); + assertRangeQueryBuilder(subQuery.mustNot().get(0), format("%s.%s", NESTED_FIELD_NAME, INT_FIELD_NAME), q -> {}); + } + } + + private static String randomSearchTerms() { + return Stream.generate(ESTestCase::randomIdentifier).limit(randomIntBetween(1, 10)).collect(Collectors.joining(" ")); + } +} diff --git a/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/KqlParserExistsQueryTests.java b/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/KqlParserExistsQueryTests.java index 45dd3312bbc0..6415cdb94ada 100644 --- a/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/KqlParserExistsQueryTests.java +++ b/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/KqlParserExistsQueryTests.java @@ -10,7 +10,10 @@ import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.ExistsQueryBuilder; import org.elasticsearch.index.query.MatchNoneQueryBuilder; -import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.query.NestedQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; + +import java.util.regex.Pattern; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.empty; @@ -35,11 +38,18 @@ public void testParseExistsQueryWithNoMatchingFields() { public void testParseExistsQueryWithASingleField() { for (String fieldName : searchableFields()) { - ExistsQueryBuilder parsedQuery = asInstanceOf(ExistsQueryBuilder.class, parseKqlQuery(kqlExistsQuery(fieldName))); - assertThat(parsedQuery.fieldName(), equalTo(fieldName)); + QueryBuilder parsedQuery = parseKqlQuery(kqlExistsQuery(fieldName)); // Using quotes to wrap the field name does not change the result. 
         assertThat(parseKqlQuery(kqlExistsQuery("\"" + fieldName + "\"")), equalTo(parsedQuery));
+
+            long nestingLevel = Pattern.compile("[.]").splitAsStream(fieldName).takeWhile(s -> s.equals(NESTED_FIELD_NAME)).count();
+            for (int i = 0; i < nestingLevel; i++) {
+                parsedQuery = asInstanceOf(NestedQueryBuilder.class, parsedQuery).query();
+            }
+
+            ExistsQueryBuilder existsQuery = asInstanceOf(ExistsQueryBuilder.class, parsedQuery);
+            assertThat(existsQuery.fieldName(), equalTo(fieldName));
         }
     }
@@ -53,7 +63,9 @@ public void testParseExistsQueryUsingWildcardFieldName() {
 
         assertThat(
             parsedQuery.should(),
-            containsInAnyOrder(searchableFields(fieldNamePattern).stream().map(QueryBuilders::existsQuery).toArray())
+            containsInAnyOrder(
+                searchableFields(fieldNamePattern).stream().map(fieldName -> parseKqlQuery(kqlExistsQuery(fieldName))).toArray()
+            )
         );
     }
diff --git a/x-pack/plugin/kql/src/test/resources/supported-queries b/x-pack/plugin/kql/src/test/resources/supported-queries
index b659b1ae5b1d..f54a1d32fe3b 100644
--- a/x-pack/plugin/kql/src/test/resources/supported-queries
+++ b/x-pack/plugin/kql/src/test/resources/supported-queries
@@ -91,13 +91,6 @@ mapped_nested: { NOT(mapped_string:foo AND mapped_string_2:foo bar) }
 mapped_nested: { NOT mapped_string:foo AND NOT mapped_string_2:foo bar }
 mapped_nested: { (NOT mapped_string:foo) AND (NOT mapped_string_2:foo bar) }
 mapped_nested: { NOT(mapped_string:foo) AND NOT(mapped_string_2:foo bar) }
-mapped_nested: { mapped_string:foo AND mapped_string_2:foo bar AND foo bar }
-mapped_nested: { mapped_string:foo AND mapped_string_2:foo bar OR foo bar }
-mapped_nested: { mapped_string:foo OR mapped_string_2:foo bar OR foo bar }
-mapped_nested: { mapped_string:foo OR mapped_string_2:foo bar AND foo bar }
-mapped_nested: { mapped_string:foo AND (mapped_string_2:foo bar OR foo bar) }
-mapped_nested: { mapped_string:foo AND (mapped_string_2:foo bar OR foo bar) }
-mapped_nested: { mapped_string:foo OR (mapped_string_2:foo bar OR foo bar) }
 mapped_nested: { mapped_str*:foo }
 mapped_nested: { mapped_nested : { mapped_string:foo AND mapped_int < 3 } AND mapped_string_2:foo bar }
 mapped_nested: { mapped_nested.mapped_string:foo AND mapped_string_2:foo bar }
diff --git a/x-pack/plugin/kql/src/test/resources/unsupported-queries b/x-pack/plugin/kql/src/test/resources/unsupported-queries
index 149bcf5bd2b5..526ae94d6ac8 100644
--- a/x-pack/plugin/kql/src/test/resources/unsupported-queries
+++ b/x-pack/plugin/kql/src/test/resources/unsupported-queries
@@ -25,6 +25,20 @@ mapped_string:(foo (bar))
 // Bad syntax for nested fields:
 mapped_nested { mapped_string: bar }
 
+// Unknown nested field or not a nested field
+not_nested : { mapped_string: bar }
+mapped_string: { mapped_string: bar }
+
+// Nested queries cannot use fieldless subqueries
+mapped_nested: { foo }
+mapped_nested: { mapped_string:foo AND mapped_string_2:foo bar AND foo bar }
+mapped_nested: { mapped_string:foo AND mapped_string_2:foo bar OR foo bar }
+mapped_nested: { mapped_string:foo OR mapped_string_2:foo bar OR foo bar }
+mapped_nested: { mapped_string:foo OR mapped_string_2:foo bar AND foo bar }
+mapped_nested: { mapped_string:foo AND (mapped_string_2:foo bar OR foo bar) }
+mapped_nested: { mapped_string:foo AND (mapped_string_2:foo bar OR foo bar) }
+mapped_nested: { mapped_string:foo OR (mapped_string_2:foo bar OR foo bar) }
+
 // Missing escape sequences:
 mapped_string: foo:bar
 mapped_string: (foo and bar)
diff --git
a/x-pack/plugin/kql/src/yamlRestTest/resources/rest-api-spec/test/kql/50_kql_nested_fields_query.yml b/x-pack/plugin/kql/src/yamlRestTest/resources/rest-api-spec/test/kql/50_kql_nested_fields_query.yml new file mode 100644 index 000000000000..4ce6688e5222 --- /dev/null +++ b/x-pack/plugin/kql/src/yamlRestTest/resources/rest-api-spec/test/kql/50_kql_nested_fields_query.yml @@ -0,0 +1,218 @@ +setup: + - requires: + capabilities: + - method: POST + path: /_search + capabilities: [ kql_query ] + test_runner_features: [ capabilities, contains ] + reason: KQL query is not available + + - requires: + "test_runner_features": "contains" + + - do: + indices.create: + index: test-index + body: + mappings: + properties: + department: + type: keyword + staff: + type: integer + courses: + type: nested + properties: + name: + type: text + credits: + type: integer + sessions: + type: nested + properties: + semester: + type: keyword + students: + type: integer + + - do: + bulk: + index: test-index + refresh: true + body: | + { "index" : { "_id": "doc-1" } } + { "department": "compsci", "staff": 12, "courses": [ { "name": "Object Oriented Programming", "credits": 3, "sessions": [ { "semester": "spr2021", "students": 37 }, { "semester": "fall2020", "students": 45} ] }, { "name": "Theory of Computation", "credits": 4, "sessions": [ { "semester": "spr2021", "students": 19 }, { "semester": "fall2020", "students": 14 } ] } ] } + { "index" : { "_id": "doc-42" } } + { "department": "math", "staff": 20, "courses": [ { "name": "Precalculus", "credits": 1, "sessions": [ { "semester": "spr2021", "students": 100 }, { "semester": "fall2020", "students": 134 } ] }, { "name": "Linear Algebra", "credits": 3, "sessions": [ { "semester": "spr2021", "students": 29 }, { "semester": "fall2020", "students": 23 } ] } ] } + +--- +"Inline syntax": + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { + "kql": { + "query": "courses.name: object oriented programming" + } + } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-1" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { + "kql": { + "query": "courses.name: object oriented programming AND courses.credits > 3" + } + } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-1" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { + "kql": { + "query": "courses.name: object oriented programming OR courses.credits > 3" + } + } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-1" } + + +--- +"Nested field syntax": + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { + "kql": { + "query": "courses : { name: object oriented programming }" + } + } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-1" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { + "kql": { + "query": "courses: { name: object oriented programming AND credits > 3 }" + } + } + } + - match: { hits.total: 0 } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { + "kql": { + "query": "courses: { name: object oriented programming AND credits >= 3 }" + } + } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-1" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { + "kql": { + "query": "courses: { name: object oriented 
programming OR credits > 3 }"
+              }
+            }
+          }
+  - match: { hits.total: 1 }
+  - match: { hits.hits.0._id: "doc-1" }
+
+  - do:
+      search:
+        index: test-index
+        rest_total_hits_as_int: true
+        body: >
+          {
+            "query": {
+              "kql": {
+                "query": "courses: { NOT name: object oriented programming AND credits < 4 }"
+              }
+            }
+          }
+  - match: { hits.total: 1 }
+  - match: { hits.hits.0._id: "doc-42" }
+
+
+---
+"Several levels of nesting field syntax":
+  - do:
+      search:
+        index: test-index
+        rest_total_hits_as_int: true
+        body: >
+          {
+            "query": {
+              "kql": {
+                "query": "courses: { name: object oriented programming AND sessions.semester: spr2021 }"
+              }
+            }
+          }
+  - match: { hits.total: 1 }
+  - match: { hits.hits.0._id: "doc-1" }
+
+  - do:
+      search:
+        index: test-index
+        rest_total_hits_as_int: true
+        body: >
+          {
+            "query": {
+              "kql": {
+                "query": "courses: { sessions : { semester: spr2021 AND students < 20 } }"
+              }
+            }
+          }
+  - match: { hits.total: 1 }
+  - match: { hits.hits.0._id: "doc-1" }
+
+  - do:
+      search:
+        index: test-index
+        rest_total_hits_as_int: true
+        body: >
+          {
+            "query": {
+              "kql": {
+                "query": "courses: { name: computation AND sessions : { semester: spr2021 AND students < 20 } }"
+              }
+            }
+          }
+  - match: { hits.total: 1 }
+  - match: { hits.hits.0._id: "doc-1" }
diff --git a/x-pack/plugin/logsdb/qa/with-basic/build.gradle b/x-pack/plugin/logsdb/qa/with-basic/build.gradle
index 2fdeed338e1c..44ebd83bf4f4 100644
--- a/x-pack/plugin/logsdb/qa/with-basic/build.gradle
+++ b/x-pack/plugin/logsdb/qa/with-basic/build.gradle
@@ -15,7 +15,7 @@ dependencies {
 
 tasks.named("javaRestTest").configure {
   // This test cluster is using a BASIC license and FIPS 140 mode is not supported in BASIC
-  BuildParams.withFipsEnabledOnly(it)
+  buildParams.withFipsEnabledOnly(it)
 
   usesDefaultDistribution()
 }
diff --git a/x-pack/plugin/mapper-constant-keyword/build.gradle b/x-pack/plugin/mapper-constant-keyword/build.gradle
index ad9d3c2f8663..3b11d951fe37 100644
--- a/x-pack/plugin/mapper-constant-keyword/build.gradle
+++ b/x-pack/plugin/mapper-constant-keyword/build.gradle
@@ -18,7 +18,7 @@ dependencies {
   compileOnly project(path: xpackModule('core'))
 }
 
-if (BuildParams.isSnapshotBuild() == false) {
+if (buildParams.isSnapshotBuild() == false) {
   tasks.named("test").configure {
     systemProperty 'es.index_mode_feature_flag_registered', 'true'
   }
diff --git a/x-pack/plugin/mapper-unsigned-long/build.gradle b/x-pack/plugin/mapper-unsigned-long/build.gradle
index e011723da623..faad1db82256 100644
--- a/x-pack/plugin/mapper-unsigned-long/build.gradle
+++ b/x-pack/plugin/mapper-unsigned-long/build.gradle
@@ -37,7 +37,7 @@ restResources {
   }
 }
 
-if (BuildParams.isSnapshotBuild() == false) {
+if (buildParams.isSnapshotBuild() == false) {
   tasks.named("test").configure {
     systemProperty 'es.index_mode_feature_flag_registered', 'true'
   }
diff --git a/x-pack/plugin/mapper-version/build.gradle b/x-pack/plugin/mapper-version/build.gradle
index 69622762b9d5..fb760b3446df 100644
--- a/x-pack/plugin/mapper-version/build.gradle
+++ b/x-pack/plugin/mapper-version/build.gradle
@@ -25,7 +25,7 @@ dependencies {
   testImplementation project(path: xpackModule('analytics'))
 }
 
-if (BuildParams.isSnapshotBuild() == false) {
+if (buildParams.isSnapshotBuild() == false) {
   tasks.named("test").configure {
     systemProperty 'es.index_mode_feature_flag_registered', 'true'
   }
diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/DownloadTaskRemovedListener.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/DownloadTaskRemovedListener.java
new file mode 100644
index 000000000000..929dac6ee357
--- /dev/null
+++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/DownloadTaskRemovedListener.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.ml.packageloader.action;
+
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
+import org.elasticsearch.tasks.RemovedTaskListener;
+import org.elasticsearch.tasks.Task;
+
+public record DownloadTaskRemovedListener(ModelDownloadTask trackedTask, ActionListener<AcknowledgedResponse> listener)
+    implements
+        RemovedTaskListener {
+
+    @Override
+    public void onRemoved(Task task) {
+        if (task.getId() == trackedTask.getId()) {
+            if (trackedTask.getTaskException() == null) {
+                listener.onResponse(AcknowledgedResponse.TRUE);
+            } else {
+                listener.onFailure(trackedTask.getTaskException());
+            }
+        }
+    }
+}
diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelDownloadTask.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelDownloadTask.java
index 59977bd418e1..dd09c3cf65fe 100644
--- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelDownloadTask.java
+++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelDownloadTask.java
@@ -13,6 +13,7 @@ import org.elasticsearch.tasks.Task;
 import org.elasticsearch.tasks.TaskId;
 import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xpack.core.ml.MlTasks;
 
 import java.io.IOException;
 import java.util.Map;
@@ -51,9 +52,12 @@ public void writeTo(StreamOutput out) throws IOException {
     }
 
     private final AtomicReference<DownLoadProgress> downloadProgress = new AtomicReference<>(new DownLoadProgress(0, 0));
+    private final String modelId;
+    private volatile Exception taskException;
 
-    public ModelDownloadTask(long id, String type, String action, String description, TaskId parentTaskId, Map<String, String> headers) {
-        super(id, type, action, description, parentTaskId, headers);
+    public ModelDownloadTask(long id, String type, String action, String modelId, TaskId parentTaskId, Map<String, String> headers) {
+        super(id, type, action, taskDescription(modelId), parentTaskId, headers);
+        this.modelId = modelId;
     }
 
     void setProgress(int totalParts, int downloadedParts) {
@@ -65,4 +69,19 @@ public DownloadStatus getStatus() {
         return new DownloadStatus(downloadProgress.get());
     }
 
+    public String getModelId() {
+        return modelId;
+    }
+
+    public void setTaskException(Exception exception) {
+        this.taskException = exception;
+    }
+
+    public Exception getTaskException() {
+        return taskException;
+    }
+
+    public static String taskDescription(String modelId) {
+        return MlTasks.downloadModelTaskDescription(modelId);
+    }
 }
diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java
index 76b7781b1cff..2a14a8761e35 100644
--- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java
+++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java
@@ -30,7 +30,6 @@ import org.elasticsearch.tasks.TaskAwareRequest;
 import org.elasticsearch.tasks.TaskCancelledException;
 import org.elasticsearch.tasks.TaskId;
-import org.elasticsearch.tasks.TaskManager;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.core.common.notifications.Level;
@@ -42,6 +41,9 @@ import java.io.IOException;
 import java.net.MalformedURLException;
 import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
@@ -49,7 +51,6 @@ import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN;
 import static org.elasticsearch.xpack.core.ml.MlTasks.MODEL_IMPORT_TASK_ACTION;
 import static org.elasticsearch.xpack.core.ml.MlTasks.MODEL_IMPORT_TASK_TYPE;
-import static org.elasticsearch.xpack.core.ml.MlTasks.downloadModelTaskDescription;
 
 public class TransportLoadTrainedModelPackage extends TransportMasterNodeAction<Request, AcknowledgedResponse> {
@@ -57,6 +58,7 @@ public class TransportLoadTrainedModelPackage extends TransportMasterNodeAction<Request, AcknowledgedResponse> {
 
     private final Client client;
     private final CircuitBreakerService circuitBreakerService;
+    final Map<String, List<DownloadTaskRemovedListener>> taskRemovedListenersByModelId;
 
     @Inject
     public TransportLoadTrainedModelPackage(
@@ -81,6 +83,7 @@ public TransportLoadTrainedModelPackage(
         );
         this.client = new OriginSettingClient(client, ML_ORIGIN);
         this.circuitBreakerService = circuitBreakerService;
+        taskRemovedListenersByModelId = new HashMap<>();
     }
 
     @Override
@@ -91,6 +94,12 @@ protected ClusterBlockException checkBlock(Request request, ClusterState state)
 
     @Override
     protected void masterOperation(Task task, Request request, ClusterState state, ActionListener<AcknowledgedResponse> listener)
         throws Exception {
+        if (handleDownloadInProgress(request.getModelId(), request.isWaitForCompletion(), listener)) {
+            logger.debug("Existing download of model [{}] in progress", request.getModelId());
+            // download in progress, nothing to do
+            return;
+        }
+
         ModelDownloadTask downloadTask = createDownloadTask(request);
 
         try {
@@ -107,7 +116,7 @@ protected void masterOperation(Task task, Request request, ClusterState state, ActionListener<AcknowledgedResponse> listener)
 
             var downloadCompleteListener = request.isWaitForCompletion() ? listener : ActionListener.noop();
 
-            importModel(client, taskManager, request, modelImporter, downloadCompleteListener, downloadTask);
+            importModel(client, () -> unregisterTask(downloadTask), request, modelImporter, downloadTask, downloadCompleteListener);
         } catch (Exception e) {
             taskManager.unregister(downloadTask);
             listener.onFailure(e);
@@ -124,22 +133,91 @@ private ParentTaskAssigningClient getParentTaskAssigningClient(Task originTask)
         return new ParentTaskAssigningClient(client, parentTaskId);
     }
 
+    /**
+     * Look for a current download task of the model and optionally wait
+     * for that task to complete if there is one.
+     * Synchronized with {@code unregisterTask} to prevent the task being
+     * removed before the remove listener is added.
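+     * An in-progress download is detected by looking for a {@code ModelDownloadTask} among the
+     * cancellable tasks whose description matches the description derived from the model id.
+     *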
+     * @param modelId Model being downloaded
+     * @param isWaitForCompletion Wait until the download completes before
+     *                            calling the listener
+     * @param listener Model download listener
+     * @return True if a download task is in progress
+     */
+    synchronized boolean handleDownloadInProgress(
+        String modelId,
+        boolean isWaitForCompletion,
+        ActionListener<AcknowledgedResponse> listener
+    ) {
+        var description = ModelDownloadTask.taskDescription(modelId);
+        var tasks = taskManager.getCancellableTasks().values();
+
+        ModelDownloadTask inProgress = null;
+        for (var task : tasks) {
+            if (description.equals(task.getDescription()) && task instanceof ModelDownloadTask downloadTask) {
+                inProgress = downloadTask;
+                break;
+            }
+        }
+
+        if (inProgress != null) {
+            if (isWaitForCompletion == false) {
+                // Not waiting for the download to complete, it is enough that the download is in progress
+                // Respond now not when the download completes
+                listener.onResponse(AcknowledgedResponse.TRUE);
+                return true;
+            }
+            // Otherwise register a task removed listener which is called
+            // once the task is complete and unregistered
+            var tracker = new DownloadTaskRemovedListener(inProgress, listener);
+            taskRemovedListenersByModelId.computeIfAbsent(modelId, s -> new ArrayList<>()).add(tracker);
+            taskManager.registerRemovedTaskListener(tracker);
+            return true;
+        }
+
+        return false;
+    }
+
+    /**
+     * Unregister the completed task triggering any remove task listeners.
+     * This method is synchronized to prevent the task being removed while
+     * {@code handleDownloadInProgress} is in progress.
+     * @param task The completed task
+     */
+    synchronized void unregisterTask(ModelDownloadTask task) {
+        taskManager.unregister(task); // unregister will call the on remove function
+
+        var trackers = taskRemovedListenersByModelId.remove(task.getModelId());
+        if (trackers != null) {
+            for (var tracker : trackers) {
+                taskManager.unregisterRemovedTaskListener(tracker);
+            }
+        }
+    }
+
     /**
      * This is package scope so that we can test the logic directly.
-     * This should only be called from the masterOperation method and the tests
+     * This should only be called from the masterOperation method and the tests.
+     * This method is static for testing.
      *
      * @param auditClient a client which should only be used to send audit notifications. This client cannot be associated with the passed
      *                    in task, that way when the task is cancelled the notification requests can
      *                    still be performed. If it is associated with the task (i.e. via ParentTaskAssigningClient),
      *                    then the requests will throw a TaskCancelledException.
+     * @param unregisterTaskFn Runnable to unregister the task. Because this is a static function
+     *                         a lambda is used rather than the instance method.
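+     *                         Unregistering also fires any {@code DownloadTaskRemovedListener}s registered
+     *                         for the model, which is how callers waiting on an existing download are notified.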
+ * @param request The download request + * @param modelImporter The importer + * @param task Download task + * @param listener Listener */ static void importModel( Client auditClient, - TaskManager taskManager, + Runnable unregisterTaskFn, Request request, ModelImporter modelImporter, - ActionListener listener, - Task task + ModelDownloadTask task, + ActionListener listener ) { final String modelId = request.getModelId(); final long relativeStartNanos = System.nanoTime(); @@ -155,9 +233,12 @@ static void importModel( Level.INFO ); listener.onResponse(AcknowledgedResponse.TRUE); - }, exception -> listener.onFailure(processException(auditClient, modelId, exception))); + }, exception -> { + task.setTaskException(exception); + listener.onFailure(processException(auditClient, modelId, exception)); + }); - modelImporter.doImport(ActionListener.runAfter(finishListener, () -> taskManager.unregister(task))); + modelImporter.doImport(ActionListener.runAfter(finishListener, unregisterTaskFn)); } static Exception processException(Client auditClient, String modelId, Exception e) { @@ -197,14 +278,7 @@ public TaskId getParentTask() { @Override public ModelDownloadTask createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { - return new ModelDownloadTask( - id, - type, - action, - downloadModelTaskDescription(request.getModelId()), - parentTaskId, - headers - ); + return new ModelDownloadTask(id, type, action, request.getModelId(), parentTaskId, headers); } }, false); } diff --git a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackageTests.java b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackageTests.java index cbcfd5b76077..3486ce6af0db 100644 --- a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackageTests.java +++ b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackageTests.java @@ -10,13 +10,19 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskCancelledException; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.common.notifications.Level; import org.elasticsearch.xpack.core.ml.action.AuditMlNotificationAction; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ModelPackageConfig; @@ -27,9 +33,13 @@ import java.io.IOException; import java.net.MalformedURLException; import java.net.URISyntaxException; +import java.util.Map; import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.core.Strings.format; +import static 
org.elasticsearch.xpack.core.ml.MlTasks.MODEL_IMPORT_TASK_ACTION; +import static org.elasticsearch.xpack.core.ml.MlTasks.MODEL_IMPORT_TASK_TYPE; +import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.core.Is.is; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; @@ -37,6 +47,7 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; public class TransportLoadTrainedModelPackageTests extends ESTestCase { private static final String MODEL_IMPORT_FAILURE_MSG_FORMAT = "Model importing failed due to %s [%s]"; @@ -44,17 +55,10 @@ public class TransportLoadTrainedModelPackageTests extends ESTestCase { public void testSendsFinishedUploadNotification() { var uploader = createUploader(null); var taskManager = mock(TaskManager.class); - var task = mock(Task.class); + var task = mock(ModelDownloadTask.class); var client = mock(Client.class); - TransportLoadTrainedModelPackage.importModel( - client, - taskManager, - createRequestWithWaiting(), - uploader, - ActionListener.noop(), - task - ); + TransportLoadTrainedModelPackage.importModel(client, () -> {}, createRequestWithWaiting(), uploader, task, ActionListener.noop()); var notificationArg = ArgumentCaptor.forClass(AuditMlNotificationAction.Request.class); // 2 notifications- the start and finish messages @@ -108,32 +112,63 @@ public void testSendsWarningNotificationForTaskCancelledException() throws Excep public void testCallsOnResponseWithAcknowledgedResponse() throws Exception { var client = mock(Client.class); var taskManager = mock(TaskManager.class); - var task = mock(Task.class); + var task = mock(ModelDownloadTask.class); ModelImporter uploader = createUploader(null); var responseRef = new AtomicReference(); var listener = ActionListener.wrap(responseRef::set, e -> fail("received an exception: " + e.getMessage())); - TransportLoadTrainedModelPackage.importModel(client, taskManager, createRequestWithWaiting(), uploader, listener, task); + TransportLoadTrainedModelPackage.importModel(client, () -> {}, createRequestWithWaiting(), uploader, task, listener); assertThat(responseRef.get(), is(AcknowledgedResponse.TRUE)); } public void testDoesNotCallListenerWhenNotWaitingForCompletion() { var uploader = mock(ModelImporter.class); var client = mock(Client.class); - var taskManager = mock(TaskManager.class); - var task = mock(Task.class); - + var task = mock(ModelDownloadTask.class); TransportLoadTrainedModelPackage.importModel( client, - taskManager, + () -> {}, createRequestWithoutWaiting(), uploader, - ActionListener.running(ESTestCase::fail), - task + task, + ActionListener.running(ESTestCase::fail) ); } + public void testWaitForExistingDownload() { + var taskManager = mock(TaskManager.class); + var modelId = "foo"; + var task = new ModelDownloadTask(1L, MODEL_IMPORT_TASK_TYPE, MODEL_IMPORT_TASK_ACTION, modelId, new TaskId("node", 1L), Map.of()); + when(taskManager.getCancellableTasks()).thenReturn(Map.of(1L, task)); + + var transportService = mock(TransportService.class); + when(transportService.getTaskManager()).thenReturn(taskManager); + + var action = new TransportLoadTrainedModelPackage( + transportService, + mock(ClusterService.class), + mock(ThreadPool.class), + mock(ActionFilters.class), + mock(IndexNameExpressionResolver.class), + mock(Client.class), + mock(CircuitBreakerService.class) + ); + + assertTrue(action.handleDownloadInProgress(modelId, true, 
ActionListener.noop())); + verify(taskManager).registerRemovedTaskListener(any()); + assertThat(action.taskRemovedListenersByModelId.entrySet(), hasSize(1)); + assertThat(action.taskRemovedListenersByModelId.get(modelId), hasSize(1)); + + // With wait for completion == false no new removed listener will be added + assertTrue(action.handleDownloadInProgress(modelId, false, ActionListener.noop())); + verify(taskManager, times(1)).registerRemovedTaskListener(any()); + assertThat(action.taskRemovedListenersByModelId.entrySet(), hasSize(1)); + assertThat(action.taskRemovedListenersByModelId.get(modelId), hasSize(1)); + + assertFalse(action.handleDownloadInProgress("no-task-for-this-one", randomBoolean(), ActionListener.noop())); + } + private void assertUploadCallsOnFailure(Exception exception, String message, RestStatus status, Level level) throws Exception { var esStatusException = new ElasticsearchStatusException(message, status, exception); @@ -152,7 +187,7 @@ private void assertNotificationAndOnFailure( ) throws Exception { var client = mock(Client.class); var taskManager = mock(TaskManager.class); - var task = mock(Task.class); + var task = mock(ModelDownloadTask.class); ModelImporter uploader = createUploader(thrownException); var failureRef = new AtomicReference(); @@ -160,7 +195,14 @@ private void assertNotificationAndOnFailure( (AcknowledgedResponse response) -> { fail("received a acknowledged response: " + response.toString()); }, failureRef::set ); - TransportLoadTrainedModelPackage.importModel(client, taskManager, createRequestWithWaiting(), uploader, listener, task); + TransportLoadTrainedModelPackage.importModel( + client, + () -> taskManager.unregister(task), + createRequestWithWaiting(), + uploader, + task, + listener + ); var notificationArg = ArgumentCaptor.forClass(AuditMlNotificationAction.Request.class); // 2 notifications- the starting message and the failure diff --git a/x-pack/plugin/ml/build.gradle b/x-pack/plugin/ml/build.gradle index e79a77129339..67c26c78a674 100644 --- a/x-pack/plugin/ml/build.gradle +++ b/x-pack/plugin/ml/build.gradle @@ -94,7 +94,7 @@ dependencies { } def mlCppVersion(){ - return (project.gradle.parent != null && BuildParams.isSnapshotBuild() == false) ? + return (project.gradle.parent != null && buildParams.isSnapshotBuild() == false) ? 
(project.version + "-SNAPSHOT") : project.version; } diff --git a/x-pack/plugin/ml/qa/basic-multi-node/build.gradle b/x-pack/plugin/ml/qa/basic-multi-node/build.gradle index 64970d18b5c8..3854c70b0f38 100644 --- a/x-pack/plugin/ml/qa/basic-multi-node/build.gradle +++ b/x-pack/plugin/ml/qa/basic-multi-node/build.gradle @@ -18,7 +18,7 @@ testClusters.configureEach { setting 'slm.history_index_enabled', 'false' } -if (BuildParams.inFipsJvm){ +if (buildParams.inFipsJvm){ // Test clusters run with security disabled tasks.named("javaRestTest").configure{enabled = false } } diff --git a/x-pack/plugin/ml/qa/disabled/build.gradle b/x-pack/plugin/ml/qa/disabled/build.gradle index 232700d5f84a..0d1d8d6484af 100644 --- a/x-pack/plugin/ml/qa/disabled/build.gradle +++ b/x-pack/plugin/ml/qa/disabled/build.gradle @@ -12,7 +12,7 @@ testClusters.configureEach { setting 'xpack.ml.enabled', 'false' } -if (BuildParams.inFipsJvm){ +if (buildParams.inFipsJvm){ // Test clusters run with security disabled tasks.named("javaRestTest").configure{enabled = false } } diff --git a/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/build.gradle b/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/build.gradle index bc22552d0d73..b43132c2daf5 100644 --- a/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/build.gradle +++ b/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/build.gradle @@ -12,7 +12,7 @@ dependencies { testImplementation project(':x-pack:qa') } -Version ccsCompatVersion = BuildParams.bwcVersions.minimumWireCompatibleVersion +Version ccsCompatVersion = buildParams.bwcVersions.minimumWireCompatibleVersion restResources { restApi { diff --git a/x-pack/plugin/ml/qa/single-node-tests/build.gradle b/x-pack/plugin/ml/qa/single-node-tests/build.gradle index 6979ec4dcbd3..5ed1c5179716 100644 --- a/x-pack/plugin/ml/qa/single-node-tests/build.gradle +++ b/x-pack/plugin/ml/qa/single-node-tests/build.gradle @@ -12,7 +12,7 @@ testClusters.configureEach { setting 'xpack.license.self_generated.type', 'trial' } -if (BuildParams.inFipsJvm){ +if (buildParams.inFipsJvm){ // Test clusters run with security disabled tasks.named("javaRestTest").configure{enabled = false } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java index ba4483493da1..e0405b174953 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java @@ -45,7 +45,6 @@ import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.inference.InferenceWaitForAllocation; import org.elasticsearch.xpack.ml.inference.adaptiveallocations.AdaptiveAllocationsScalerService; -import org.elasticsearch.xpack.ml.inference.adaptiveallocations.ScaleFromZeroFeatureFlag; import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentService; import org.elasticsearch.xpack.ml.inference.loadingservice.LocalModel; import org.elasticsearch.xpack.ml.inference.loadingservice.ModelLoadingService; @@ -277,13 +276,11 @@ private void inferAgainstAllocatedModel( boolean starting = adaptiveAllocationsScalerService.maybeStartAllocation(assignment); if (starting) { message += "; starting deployment of one allocation"; - - if (ScaleFromZeroFeatureFlag.isEnabled()) { - waitForAllocation.waitForAssignment( - new 
InferenceWaitForAllocation.WaitingRequest(request, responseBuilder, parentTaskId, listener) - ); - return; - } + logger.debug(message); + waitForAllocation.waitForAssignment( + new InferenceWaitForAllocation.WaitingRequest(request, responseBuilder, parentTaskId, listener) + ); + return; } logger.debug(message); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java index 5fd70ce71cd2..f01372ca4f24 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java @@ -190,11 +190,11 @@ protected void masterOperation( () -> "[" + request.getDeploymentId() + "] creating new assignment for model [" + request.getModelId() + "] failed", e ); - if (ExceptionsHelper.unwrapCause(e) instanceof ResourceAlreadyExistsException) { + if (ExceptionsHelper.unwrapCause(e) instanceof ResourceAlreadyExistsException resourceAlreadyExistsException) { e = new ElasticsearchStatusException( "Cannot start deployment [{}] because it has already been started", RestStatus.CONFLICT, - e, + resourceAlreadyExistsException, request.getDeploymentId() ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/ScaleFromZeroFeatureFlag.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/ScaleFromZeroFeatureFlag.java deleted file mode 100644 index 4c446b65db9d..000000000000 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/ScaleFromZeroFeatureFlag.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.ml.inference.adaptiveallocations; - -import org.elasticsearch.common.util.FeatureFlag; - -public class ScaleFromZeroFeatureFlag { - private ScaleFromZeroFeatureFlag() {} - - private static final FeatureFlag FEATURE_FLAG = new FeatureFlag("ml_scale_from_zero"); - - public static boolean isEnabled() { - return FEATURE_FLAG.isEnabled(); - } -} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/process/AnalyticsProcessManagerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/process/AnalyticsProcessManagerTests.java index 56cdcc88df91..15fb2b2b81f3 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/process/AnalyticsProcessManagerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/process/AnalyticsProcessManagerTests.java @@ -138,16 +138,20 @@ public void testRunJob_TaskIsStopping() { when(task.isStopping()).thenReturn(true); when(task.getParams()).thenReturn(new StartDataFrameAnalyticsAction.TaskParams("data_frame_id", MlConfigVersion.CURRENT, false)); - processManager.runJob(task, dataFrameAnalyticsConfig, dataExtractorFactory, + processManager.runJob( + task, + dataFrameAnalyticsConfig, + dataExtractorFactory, ActionTestUtils.assertNoFailureListener(stepResponse -> { - assertThat(processManager.getProcessContextCount(), equalTo(0)); - assertThat(stepResponse.isTaskComplete(), is(true)); + assertThat(processManager.getProcessContextCount(), equalTo(0)); + assertThat(stepResponse.isTaskComplete(), is(true)); - InOrder inOrder = inOrder(task); - inOrder.verify(task).isStopping(); - inOrder.verify(task).getParams(); - verifyNoMoreInteractions(task); - })); + InOrder inOrder = inOrder(task); + inOrder.verify(task).isStopping(); + inOrder.verify(task).getParams(); + verifyNoMoreInteractions(task); + }) + ); } public void testRunJob_ProcessContextAlreadyExists() { diff --git a/x-pack/plugin/otel-data/src/main/java/org/elasticsearch/xpack/oteldata/OTelIndexTemplateRegistry.java b/x-pack/plugin/otel-data/src/main/java/org/elasticsearch/xpack/oteldata/OTelIndexTemplateRegistry.java index 435530542c85..ca52db9331cf 100644 --- a/x-pack/plugin/otel-data/src/main/java/org/elasticsearch/xpack/oteldata/OTelIndexTemplateRegistry.java +++ b/x-pack/plugin/otel-data/src/main/java/org/elasticsearch/xpack/oteldata/OTelIndexTemplateRegistry.java @@ -10,7 +10,6 @@ import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.ClientHelper; @@ -27,10 +26,9 @@ public OTelIndexTemplateRegistry( ClusterService clusterService, ThreadPool threadPool, Client client, - NamedXContentRegistry xContentRegistry, - FeatureService featureService + NamedXContentRegistry xContentRegistry ) { - super(nodeSettings, clusterService, threadPool, client, xContentRegistry, featureService); + super(nodeSettings, clusterService, threadPool, client, xContentRegistry); } @Override diff --git a/x-pack/plugin/otel-data/src/main/java/org/elasticsearch/xpack/oteldata/OTelPlugin.java b/x-pack/plugin/otel-data/src/main/java/org/elasticsearch/xpack/oteldata/OTelPlugin.java index 543102330bd0..67bd8c4e002d 100644 --- a/x-pack/plugin/otel-data/src/main/java/org/elasticsearch/xpack/oteldata/OTelPlugin.java +++ 
b/x-pack/plugin/otel-data/src/main/java/org/elasticsearch/xpack/oteldata/OTelPlugin.java @@ -48,14 +48,7 @@ public Collection createComponents(PluginServices services) { Settings settings = services.environment().settings(); ClusterService clusterService = services.clusterService(); registry.set( - new OTelIndexTemplateRegistry( - settings, - clusterService, - services.threadPool(), - services.client(), - services.xContentRegistry(), - services.featureService() - ) + new OTelIndexTemplateRegistry(settings, clusterService, services.threadPool(), services.client(), services.xContentRegistry()) ); if (enabled) { OTelIndexTemplateRegistry registryInstance = registry.get(); diff --git a/x-pack/plugin/repositories-metering-api/qa/azure/build.gradle b/x-pack/plugin/repositories-metering-api/qa/azure/build.gradle index 01264d784968..4683c13f1fc0 100644 --- a/x-pack/plugin/repositories-metering-api/qa/azure/build.gradle +++ b/x-pack/plugin/repositories-metering-api/qa/azure/build.gradle @@ -38,7 +38,7 @@ tasks.named("javaRestTest") { systemProperty 'test.azure.container', azureContainer systemProperty 'test.azure.key', azureKey systemProperty 'test.azure.sas_token', azureSasToken - nonInputProperties.systemProperty 'test.azure.base_path', azureBasePath + "_repositories_metering_tests_" + BuildParams.testSeed + nonInputProperties.systemProperty 'test.azure.base_path', azureBasePath + "_repositories_metering_tests_" + buildParams.testSeed } tasks.register("azureThirdPartyTest") { diff --git a/x-pack/plugin/repositories-metering-api/qa/gcs/build.gradle b/x-pack/plugin/repositories-metering-api/qa/gcs/build.gradle index b8c345c99b89..62fe47c08f5f 100644 --- a/x-pack/plugin/repositories-metering-api/qa/gcs/build.gradle +++ b/x-pack/plugin/repositories-metering-api/qa/gcs/build.gradle @@ -35,7 +35,7 @@ if (!gcsServiceAccount && !gcsBucket && !gcsBasePath) { tasks.named("javaRestTest").configure { systemProperty 'test.google.fixture', Boolean.toString(useFixture) systemProperty 'test.gcs.bucket', gcsBucket - nonInputProperties.systemProperty 'test.gcs.base_path', gcsBasePath + "_repositories_metering" + BuildParams.testSeed + nonInputProperties.systemProperty 'test.gcs.base_path', gcsBasePath + "_repositories_metering" + buildParams.testSeed if (useFixture == false) { systemProperty 'test.google.account', serviceAccountFile } diff --git a/x-pack/plugin/repositories-metering-api/qa/s3/build.gradle b/x-pack/plugin/repositories-metering-api/qa/s3/build.gradle index 5f2bf66f31b2..3c58e6a06af6 100644 --- a/x-pack/plugin/repositories-metering-api/qa/s3/build.gradle +++ b/x-pack/plugin/repositories-metering-api/qa/s3/build.gradle @@ -38,7 +38,7 @@ tasks.named("javaRestTest").configure { systemProperty 'test.s3.bucket', s3Bucket systemProperty("s3AccessKey", s3AccessKey) systemProperty("s3SecretKey", s3SecretKey) - nonInputProperties.systemProperty 'test.s3.base_path', s3BasePath ? s3BasePath + "_repositories_metering" + BuildParams.testSeed : 'base_path_integration_tests' + nonInputProperties.systemProperty 'test.s3.base_path', s3BasePath ? 
s3BasePath + "_repositories_metering" + buildParams.testSeed : 'base_path_integration_tests' } tasks.register("s3ThirdPartyTest").configure { dependsOn "javaRestTest" diff --git a/x-pack/plugin/searchable-snapshots/qa/azure/build.gradle b/x-pack/plugin/searchable-snapshots/qa/azure/build.gradle index 6774ef920f28..e2f77fae8922 100644 --- a/x-pack/plugin/searchable-snapshots/qa/azure/build.gradle +++ b/x-pack/plugin/searchable-snapshots/qa/azure/build.gradle @@ -33,7 +33,7 @@ tasks.named("javaRestTest").configure { systemProperty 'test.azure.container', azureContainer systemProperty 'test.azure.key', azureKey systemProperty 'test.azure.sas_token', azureSasToken - nonInputProperties.systemProperty 'test.azure.base_path', azureBasePath + "_searchable_snapshots_tests_" + BuildParams.testSeed + nonInputProperties.systemProperty 'test.azure.base_path', azureBasePath + "_searchable_snapshots_tests_" + buildParams.testSeed } tasks.register("azureThirdPartyTest") { diff --git a/x-pack/plugin/searchable-snapshots/qa/gcs/build.gradle b/x-pack/plugin/searchable-snapshots/qa/gcs/build.gradle index 3099f0787998..c0a420aff313 100644 --- a/x-pack/plugin/searchable-snapshots/qa/gcs/build.gradle +++ b/x-pack/plugin/searchable-snapshots/qa/gcs/build.gradle @@ -29,7 +29,7 @@ if (!gcsServiceAccount && !gcsBucket && !gcsBasePath) { tasks.named("javaRestTest").configure { systemProperty 'test.google.fixture', Boolean.toString(useFixture) systemProperty 'test.gcs.bucket', gcsBucket - nonInputProperties.systemProperty 'test.gcs.base_path', gcsBasePath + "_searchable_snapshots_tests" + BuildParams.testSeed + nonInputProperties.systemProperty 'test.gcs.base_path', gcsBasePath + "_searchable_snapshots_tests" + buildParams.testSeed if (useFixture == false) { systemProperty 'test.google.account', serviceAccountFile diff --git a/x-pack/plugin/searchable-snapshots/qa/hdfs/build.gradle b/x-pack/plugin/searchable-snapshots/qa/hdfs/build.gradle index 52ea873ae53b..e8d97da9a9e3 100644 --- a/x-pack/plugin/searchable-snapshots/qa/hdfs/build.gradle +++ b/x-pack/plugin/searchable-snapshots/qa/hdfs/build.gradle @@ -28,6 +28,6 @@ restResources { tasks.named("javaRestTest").configure { usesDefaultDistribution() - BuildParams.withFipsEnabledOnly(it) + buildParams.withFipsEnabledOnly(it) jvmArgs '--add-exports', 'java.security.jgss/sun.security.krb5=ALL-UNNAMED' } diff --git a/x-pack/plugin/searchable-snapshots/qa/s3/build.gradle b/x-pack/plugin/searchable-snapshots/qa/s3/build.gradle index 8919ddc6d29f..430df2a7e812 100644 --- a/x-pack/plugin/searchable-snapshots/qa/s3/build.gradle +++ b/x-pack/plugin/searchable-snapshots/qa/s3/build.gradle @@ -40,7 +40,7 @@ tasks.named("javaRestTest").configure { systemProperty("s3AccessKey", s3AccessKey) systemProperty("s3SecretKey", s3SecretKey) - nonInputProperties.systemProperty 'test.s3.base_path', s3BasePath ? s3BasePath + "_searchable_snapshots_tests" + BuildParams.testSeed : 'base_path_integration_tests' + nonInputProperties.systemProperty 'test.s3.base_path', s3BasePath ? 
s3BasePath + "_searchable_snapshots_tests" + buildParams.testSeed : 'base_path_integration_tests' } tasks.register("s3ThirdPartyTest") { diff --git a/x-pack/plugin/searchable-snapshots/qa/s3/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/s3/S3SearchableSnapshotsCredentialsReloadIT.java b/x-pack/plugin/searchable-snapshots/qa/s3/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/s3/S3SearchableSnapshotsCredentialsReloadIT.java new file mode 100644 index 000000000000..3049fe830e72 --- /dev/null +++ b/x-pack/plugin/searchable-snapshots/qa/s3/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/s3/S3SearchableSnapshotsCredentialsReloadIT.java @@ -0,0 +1,281 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.searchablesnapshots.s3; + +import fixture.s3.S3HttpFixture; +import io.netty.handler.codec.http.HttpMethod; + +import org.apache.http.client.methods.HttpPut; +import org.apache.http.entity.ByteArrayEntity; +import org.apache.http.entity.ContentType; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.client.WarningsHandler; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.MutableSettingsProvider; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ObjectPath; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentType; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.util.function.UnaryOperator; + +import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.Matchers.allOf; + +public class S3SearchableSnapshotsCredentialsReloadIT extends ESRestTestCase { + + private static final String BUCKET = "S3SearchableSnapshotsCredentialsReloadIT-bucket"; + private static final String BASE_PATH = "S3SearchableSnapshotsCredentialsReloadIT-base-path"; + + public static final S3HttpFixture s3Fixture = new S3HttpFixture(true, BUCKET, BASE_PATH, "ignored"); + + private static final MutableSettingsProvider keystoreSettings = new MutableSettingsProvider(); + + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .setting("xpack.license.self_generated.type", "trial") + .keystore(keystoreSettings) + .setting("xpack.searchable.snapshot.shared_cache.size", "4kB") + .setting("xpack.searchable.snapshot.shared_cache.region_size", "4kB") + .setting("xpack.searchable_snapshots.cache_fetch_async_thread_pool.keep_alive", "0ms") + .setting("xpack.security.enabled", "false") + .systemProperty("es.allow_insecure_settings", "true") + .build(); + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(s3Fixture).around(cluster); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + 
@Before
+    public void skipFips() {
+        assumeFalse("getting these tests to run in a FIPS JVM is kinda fiddly and we don't really need the extra coverage", inFipsJvm());
+    }
+
+    public void testReloadCredentialsFromKeystore() throws IOException {
+        final TestHarness testHarness = new TestHarness();
+        testHarness.putRepository();
+
+        // Set up initial credentials
+        final String accessKey1 = randomIdentifier();
+        s3Fixture.setAccessKey(accessKey1);
+        keystoreSettings.put("s3.client.default.access_key", accessKey1);
+        keystoreSettings.put("s3.client.default.secret_key", randomIdentifier());
+        cluster.updateStoredSecureSettings();
+        assertOK(client().performRequest(new Request("POST", "/_nodes/reload_secure_settings")));
+
+        testHarness.createFrozenSearchableSnapshotIndex();
+
+        // Verify searchable snapshot functionality
+        testHarness.ensureSearchSuccess();
+
+        // Rotate credentials in blob store
+        logger.info("--> rotate credentials");
+        final String accessKey2 = randomValueOtherThan(accessKey1, ESTestCase::randomIdentifier);
+        s3Fixture.setAccessKey(accessKey2);
+
+        // Ensure searchable snapshot now does not work due to invalid credentials
+        logger.info("--> expect failure");
+        testHarness.ensureSearchFailure();
+
+        // Set up refreshed credentials
+        logger.info("--> update keystore contents");
+        keystoreSettings.put("s3.client.default.access_key", accessKey2);
+        cluster.updateStoredSecureSettings();
+        assertOK(client().performRequest(new Request("POST", "/_nodes/reload_secure_settings")));
+
+        // Check access using refreshed credentials
+        logger.info("--> expect success");
+        testHarness.ensureSearchSuccess();
+    }
+
+    public void testReloadCredentialsFromAlternativeClient() throws IOException {
+        final TestHarness testHarness = new TestHarness();
+        testHarness.putRepository();
+
+        // Set up credentials
+        final String accessKey1 = randomIdentifier();
+        final String accessKey2 = randomValueOtherThan(accessKey1, ESTestCase::randomIdentifier);
+        final String alternativeClient = randomValueOtherThan("default", ESTestCase::randomIdentifier);
+
+        s3Fixture.setAccessKey(accessKey1);
+        keystoreSettings.put("s3.client.default.access_key", accessKey1);
+        keystoreSettings.put("s3.client.default.secret_key", randomIdentifier());
+        keystoreSettings.put("s3.client." + alternativeClient + ".access_key", accessKey2);
+        keystoreSettings.put("s3.client." + alternativeClient + ".secret_key", randomIdentifier());
+        cluster.updateStoredSecureSettings();
+        assertOK(client().performRequest(new Request("POST", "/_nodes/reload_secure_settings")));
+
+        testHarness.createFrozenSearchableSnapshotIndex();
+
+        // Verify searchable snapshot functionality
+        testHarness.ensureSearchSuccess();
+
+        // Rotate credentials in blob store
+        logger.info("--> rotate credentials");
+        s3Fixture.setAccessKey(accessKey2);
+
+        // Ensure searchable snapshot now does not work due to invalid credentials
+        logger.info("--> expect failure");
+        testHarness.ensureSearchFailure();
+
+        // Adjust repository to use new client
+        logger.info("--> update repository metadata");
+        testHarness.putRepository(b -> b.put("client", alternativeClient));
+
+        // Check access using refreshed credentials
+        logger.info("--> expect success");
+        testHarness.ensureSearchSuccess();
+    }
+
+    public void testReloadCredentialsFromMetadata() throws IOException {
+        final TestHarness testHarness = new TestHarness();
+        testHarness.warningsHandler = WarningsHandler.PERMISSIVE;
+
+        // Set up credentials
+        final String accessKey1 = randomIdentifier();
+        final String accessKey2 = randomValueOtherThan(accessKey1, ESTestCase::randomIdentifier);
+
+        testHarness.putRepository(b -> b.put("access_key", accessKey1).put("secret_key", randomIdentifier()));
+        s3Fixture.setAccessKey(accessKey1);
+
+        testHarness.createFrozenSearchableSnapshotIndex();
+
+        // Verify searchable snapshot functionality
+        testHarness.ensureSearchSuccess();
+
+        // Rotate credentials in blob store
+        logger.info("--> rotate credentials");
+        s3Fixture.setAccessKey(accessKey2);
+
+        // Ensure searchable snapshot now does not work due to invalid credentials
+        logger.info("--> expect failure");
+        testHarness.ensureSearchFailure();
+
+        // Adjust repository to use new credentials
+        logger.info("--> update repository metadata");
+        testHarness.putRepository(b -> b.put("access_key", accessKey2).put("secret_key", randomIdentifier()));
+
+        // Check access using refreshed credentials
+        logger.info("--> expect success");
+        testHarness.ensureSearchSuccess();
+    }
+
+    private class TestHarness {
+        private final String mountedIndexName = randomIdentifier();
+        private final String repositoryName = randomIdentifier();
+
+        @Nullable // to use the default
+        WarningsHandler warningsHandler;
+
+        void putRepository() throws IOException {
+            putRepository(UnaryOperator.identity());
+        }
+
+        void putRepository(UnaryOperator<Settings.Builder> settingsOperator) throws IOException {
+            // Register repository
+            final Request request = newXContentRequest(
+                HttpMethod.PUT,
+                "/_snapshot/" + repositoryName,
+                (b, p) -> b.field("type", "s3")
+                    .startObject("settings")
+                    .value(
+                        settingsOperator.apply(
+                            Settings.builder().put("bucket", BUCKET).put("base_path", BASE_PATH).put("endpoint", s3Fixture.getAddress())
+                        ).build()
+                    )
+                    .endObject()
+            );
+            request.addParameter("verify", "false"); // because we don't have access to the blob store yet
+            request.setOptions(RequestOptions.DEFAULT.toBuilder().setWarningsHandler(warningsHandler));
+            assertOK(client().performRequest(request));
+        }
+
+        void createFrozenSearchableSnapshotIndex() throws IOException {
+            // Create an index, large enough that its data is not all captured in the file headers
+            final String indexName = randomValueOtherThan(mountedIndexName, ESTestCase::randomIdentifier);
+            createIndex(indexName, indexSettings(1, 0).build());
+            try (var bodyStream = new ByteArrayOutputStream()) {
+                for (int i = 0; i < 1024; i++) {
+                    try (XContentBuilder bodyLineBuilder = new XContentBuilder(XContentType.JSON.xContent(), bodyStream)) {
+                        bodyLineBuilder.startObject().startObject("index").endObject().endObject();
+                    }
+                    bodyStream.write(0x0a);
+                    try (XContentBuilder bodyLineBuilder = new XContentBuilder(XContentType.JSON.xContent(), bodyStream)) {
+                        bodyLineBuilder.startObject().field("foo", "bar").endObject();
+                    }
+                    bodyStream.write(0x0a);
+                }
+                bodyStream.flush();
+                final Request request = new Request("PUT", indexName + "/_bulk");
+                request.setEntity(new ByteArrayEntity(bodyStream.toByteArray(), ContentType.APPLICATION_JSON));
+                client().performRequest(request);
+            }
+
+            // Take a snapshot and delete the original index
+            final String snapshotName = randomIdentifier();
+            final Request createSnapshotRequest = new Request(HttpPut.METHOD_NAME, "_snapshot/" + repositoryName + '/' + snapshotName);
+            createSnapshotRequest.addParameter("wait_for_completion", "true");
+            createSnapshotRequest.setOptions(RequestOptions.DEFAULT.toBuilder().setWarningsHandler(warningsHandler));
+            assertOK(client().performRequest(createSnapshotRequest));
+
+            deleteIndex(indexName);
+
+            // Mount the snapshotted index as a searchable snapshot
+            final Request mountRequest = newXContentRequest(
+                HttpMethod.POST,
+                "/_snapshot/" + repositoryName + "/" + snapshotName + "/_mount",
+                (b, p) -> b.field("index", indexName).field("renamed_index", mountedIndexName)
+            );
+            mountRequest.addParameter("wait_for_completion", "true");
+            mountRequest.addParameter("storage", "shared_cache");
+            assertOK(client().performRequest(mountRequest));
+            ensureGreen(mountedIndexName);
+        }
+
+        void ensureSearchSuccess() throws IOException {
+            final Request searchRequest = new Request("GET", mountedIndexName + "/_search");
+            searchRequest.addParameter("size", "10000");
+            assertEquals(
+                "bar",
+                ObjectPath.createFromResponse(assertOK(client().performRequest(searchRequest))).evaluate("hits.hits.0._source.foo")
+            );
+        }
+
+        void ensureSearchFailure() throws IOException {
+            assertOK(client().performRequest(new Request("POST", "/_searchable_snapshots/cache/clear")));
+            final Request searchRequest = new Request("GET", mountedIndexName + "/_search");
+            searchRequest.addParameter("size", "10000");
+            assertThat(
+                expectThrows(ResponseException.class, () -> client().performRequest(searchRequest)).getMessage(),
+                allOf(
+                    containsString("Bad access key"),
+                    containsString("Status Code: 403"),
+                    containsString("Error Code: AccessDenied"),
+                    containsString("failed to read data from cache")
+                )
+            );
+        }
+    }
+
+}
diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java
index 8bb4c45e54ab..33982536cd63 100644
--- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java
+++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java
@@ -543,9 +543,9 @@ public Map getRecoveryStateFactories() {
         return Map.of(SNAPSHOT_RECOVERY_STATE_FACTORY_KEY, SearchableSnapshotRecoveryState::new);
     }
 
-    public static final String CACHE_FETCH_ASYNC_THREAD_POOL_NAME = "searchable_snapshots_cache_fetch_async";
+    public static final String CACHE_FETCH_ASYNC_THREAD_POOL_NAME = BlobStoreRepository.SEARCHABLE_SNAPSHOTS_CACHE_FETCH_ASYNC_THREAD_NAME;
     public static final String CACHE_FETCH_ASYNC_THREAD_POOL_SETTING = "xpack.searchable_snapshots.cache_fetch_async_thread_pool";
-    public static final String CACHE_PREWARMING_THREAD_POOL_NAME = "searchable_snapshots_cache_prewarming";
+    public static final String CACHE_PREWARMING_THREAD_POOL_NAME = BlobStoreRepository.SEARCHABLE_SNAPSHOTS_CACHE_PREWARMING_THREAD_NAME;
     public static final String CACHE_PREWARMING_THREAD_POOL_SETTING = "xpack.searchable_snapshots.cache_prewarming_thread_pool";
 
     public static ScalingExecutorBuilder[] executorBuilders(Settings settings) {
diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/BlobContainerSupplier.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/BlobContainerSupplier.java
new file mode 100644
index 000000000000..335c8e311ace
--- /dev/null
+++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/BlobContainerSupplier.java
@@ -0,0 +1,95 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.searchablesnapshots.store;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.elasticsearch.common.blobstore.BlobContainer;
+import org.elasticsearch.common.blobstore.OperationPurpose;
+import org.elasticsearch.common.blobstore.support.FilterBlobContainer;
+import org.elasticsearch.repositories.IndexId;
+import org.elasticsearch.repositories.blobstore.BlobStoreRepository;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.function.Supplier;
+
+public class BlobContainerSupplier implements Supplier<BlobContainer> {
+
+    private static final Logger logger = LogManager.getLogger(BlobContainerSupplier.class);
+
+    private final Supplier<BlobStoreRepository> repositorySupplier;
+    private final IndexId indexId;
+    private final int shardId;
+
+    private volatile LastKnownState lastKnownState = new LastKnownState(null, null);
+
+    public BlobContainerSupplier(Supplier<BlobStoreRepository> repositorySupplier, IndexId indexId, int shardId) {
+        this.repositorySupplier = repositorySupplier;
+        this.indexId = indexId;
+        this.shardId = shardId;
+    }
+
+    @Override
+    public BlobContainer get() {
+        final LastKnownState lastKnownState = this.lastKnownState;
+        final BlobStoreRepository currentRepository = repositorySupplier.get();
+
+        if (lastKnownState.blobStoreRepository() == currentRepository) {
+            return lastKnownState.blobContainer();
+        } else {
+            return refreshAndGet();
+        }
+    }
+
+    private synchronized BlobContainer refreshAndGet() {
+        final BlobStoreRepository currentRepository = repositorySupplier.get();
+        if (lastKnownState.blobStoreRepository() == currentRepository) {
+            return lastKnownState.blobContainer();
+        } else {
+            logger.debug("creating new blob container [{}][{}][{}]", currentRepository.getMetadata().name(), indexId, shardId);
+            final BlobContainer newContainer = new RateLimitingBlobContainer(
+                currentRepository,
+                currentRepository.shardContainer(indexId, shardId)
+            );
+            lastKnownState = new LastKnownState(currentRepository, newContainer);
+            return newContainer;
+        }
+    }
+
+    private record LastKnownState(BlobStoreRepository blobStoreRepository, BlobContainer blobContainer) {}
+
+    /**
+     * A {@link FilterBlobContainer} that uses {@link BlobStoreRepository#maybeRateLimitRestores(InputStream)} to limit the rate at which
+     * blobs are read from the repository.
+     */
+    private static class RateLimitingBlobContainer extends FilterBlobContainer {
+
+        private final BlobStoreRepository blobStoreRepository;
+
+        RateLimitingBlobContainer(BlobStoreRepository blobStoreRepository, BlobContainer blobContainer) {
+            super(blobContainer);
+            this.blobStoreRepository = blobStoreRepository;
+        }
+
+        @Override
+        protected BlobContainer wrapChild(BlobContainer child) {
+            return new RateLimitingBlobContainer(blobStoreRepository, child);
+        }
+
+        @Override
+        public InputStream readBlob(OperationPurpose purpose, String blobName) throws IOException {
+            return blobStoreRepository.maybeRateLimitRestores(super.readBlob(purpose, blobName));
+        }
+
+        @Override
+        public InputStream readBlob(OperationPurpose purpose, String blobName, long position, long length) throws IOException {
+            return blobStoreRepository.maybeRateLimitRestores(super.readBlob(purpose, blobName, position, length));
+        }
+    }
+}
diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/RepositorySupplier.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/RepositorySupplier.java
new file mode 100644
index 000000000000..63522ce2309a
--- /dev/null
+++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/RepositorySupplier.java
@@ -0,0 +1,83 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.searchablesnapshots.store;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.elasticsearch.core.Nullable;
+import org.elasticsearch.repositories.RepositoriesService;
+import org.elasticsearch.repositories.Repository;
+import org.elasticsearch.repositories.RepositoryMissingException;
+import org.elasticsearch.repositories.blobstore.BlobStoreRepository;
+import org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshots;
+
+import java.util.Map;
+import java.util.Objects;
+import java.util.function.Supplier;
+
+public class RepositorySupplier implements Supplier<BlobStoreRepository> {
+
+    private static final Logger logger = LogManager.getLogger(RepositorySupplier.class);
+
+    private final RepositoriesService repositoriesService;
+
+    private final String repositoryName;
+
+    @Nullable // if repository specified only by name
+    private final String repositoryUuid;
+
+    private volatile String repositoryNameHint;
+
+    public RepositorySupplier(RepositoriesService repositoriesService, String repositoryName, String repositoryUuid) {
+        this.repositoriesService = Objects.requireNonNull(repositoriesService);
+        this.repositoryName = Objects.requireNonNull(repositoryName);
+        this.repositoryUuid = repositoryUuid;
+        this.repositoryNameHint = repositoryName;
+    }
+
+    @Override
+    public BlobStoreRepository get() {
+        return SearchableSnapshots.getSearchableRepository(getRepository());
+    }
+
+    private Repository getRepository() {
+        if (repositoryUuid == null) {
+            // repository containing pre-7.12 snapshots has no UUID so we assume it matches by name
+            final Repository repository = repositoriesService.repository(repositoryName);
+            assert repository.getMetadata().name().equals(repositoryName) : repository.getMetadata().name() + " vs " + repositoryName;
+            return repository;
+        }
+
+        final Map<String, Repository> repositoriesByName = repositoriesService.getRepositories();
+
+        final String currentRepositoryNameHint = repositoryNameHint;
+        final Repository repositoryByLastKnownName = repositoriesByName.get(currentRepositoryNameHint);
+        if (repositoryByLastKnownName != null) {
+            final var foundRepositoryUuid = repositoryByLastKnownName.getMetadata().uuid();
+            if (Objects.equals(repositoryUuid, foundRepositoryUuid)) {
+                return repositoryByLastKnownName;
+            }
+        }
+
+        for (final Repository repository : repositoriesByName.values()) {
+            if (repository.getMetadata().uuid().equals(repositoryUuid)) {
+                final var newRepositoryName = repository.getMetadata().name();
+                logger.debug(
+                    "getRepository: repository [{}] with uuid [{}] replacing repository [{}]",
+                    newRepositoryName,
+                    repositoryUuid,
+                    currentRepositoryNameHint
+                );
+                repositoryNameHint = repository.getMetadata().name();
+                return repository;
+            }
+        }
+
+        throw new RepositoryMissingException("uuid [" + repositoryUuid + "], original name [" + repositoryName + "]");
+    }
+}
diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectory.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectory.java
index b56cd28e9dc6..bbdf371e1ed7 100644
--- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectory.java
+++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectory.java
@@ -24,8 +24,6 @@
 import org.elasticsearch.cluster.metadata.IndexMetadata;
 import org.elasticsearch.cluster.routing.RecoverySource;
 import org.elasticsearch.common.blobstore.BlobContainer;
-import org.elasticsearch.common.blobstore.OperationPurpose;
-import org.elasticsearch.common.blobstore.support.FilterBlobContainer;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.lucene.store.ByteArrayIndexInput;
 import org.elasticsearch.common.settings.Settings;
@@ -43,8 +41,6 @@
 import org.elasticsearch.indices.recovery.RecoveryState;
 import org.elasticsearch.repositories.IndexId;
 import org.elasticsearch.repositories.RepositoriesService;
-import org.elasticsearch.repositories.Repository;
-import org.elasticsearch.repositories.RepositoryMissingException;
 import org.elasticsearch.repositories.blobstore.BlobStoreRepository;
 import org.elasticsearch.snapshots.SnapshotId;
 import org.elasticsearch.threadpool.ThreadPool;
@@ -62,7 +58,6 @@
 import java.io.FileNotFoundException;
 import java.io.IOException;
-import java.io.InputStream;
 import java.io.UncheckedIOException;
 import java.nio.file.Files;
 import java.nio.file.Path;
@@ -134,7 +129,6 @@ public class SearchableSnapshotDirectory extends BaseDirectory {
 
     // volatile fields are updated once under `this` lock, all together, iff loaded is not true.
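+    // Note: the blob container is intentionally no longer cached in a field here; blobContainer() asks
+    // blobContainerSupplier on every call so that a repository instance that was rebuilt (for example after
+    // a credentials or client change) is picked up automatically.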
     private volatile BlobStoreIndexShardSnapshot snapshot;
-    private volatile BlobContainer blobContainer;
     private volatile boolean loaded;
     private volatile SearchableSnapshotRecoveryState recoveryState;
 
@@ -182,7 +176,6 @@ public SearchableSnapshotDirectory(
 
     private synchronized boolean invariant() {
         assert loaded != (snapshot == null);
-        assert loaded != (blobContainer == null);
         assert loaded != (recoveryState == null);
         return true;
     }
@@ -212,7 +205,6 @@ public boolean loadSnapshot(
         synchronized (this) {
             alreadyLoaded = this.loaded;
             if (alreadyLoaded == false) {
-                this.blobContainer = blobContainerSupplier.get();
                 this.snapshot = snapshotSupplier.get();
                 this.loaded = true;
                 cleanExistingRegularShardFiles();
@@ -226,14 +218,12 @@
         return alreadyLoaded == false;
     }
 
-    @Nullable
     public BlobContainer blobContainer() {
-        final BlobContainer blobContainer = this.blobContainer;
+        final BlobContainer blobContainer = blobContainerSupplier.get();
         assert blobContainer != null;
         return blobContainer;
     }
 
-    @Nullable
     public BlobStoreIndexShardSnapshot snapshot() {
         final BlobStoreIndexShardSnapshot snapshot = this.snapshot;
         assert snapshot != null;
@@ -590,23 +580,15 @@ public static Directory create(
             );
         }
 
-        Repository repository;
-        final String repositoryName;
-        if (SNAPSHOT_REPOSITORY_UUID_SETTING.exists(indexSettings.getSettings())) {
-            repository = repositoryByUuid(
-                repositories.getRepositories(),
-                SNAPSHOT_REPOSITORY_UUID_SETTING.get(indexSettings.getSettings()),
-                SNAPSHOT_REPOSITORY_NAME_SETTING.get(indexSettings.getSettings())
-            );
-            repositoryName = repository.getMetadata().name();
-        } else {
-            // repository containing pre-7.12 snapshots has no UUID so we assume it matches by name
-            repositoryName = SNAPSHOT_REPOSITORY_NAME_SETTING.get(indexSettings.getSettings());
-            repository = repositories.repository(repositoryName);
-            assert repository.getMetadata().name().equals(repositoryName) : repository.getMetadata().name() + " vs " + repositoryName;
-        }
+        final Supplier<BlobStoreRepository> repositorySupplier = new RepositorySupplier(
+            repositories,
+            SNAPSHOT_REPOSITORY_NAME_SETTING.get(indexSettings.getSettings()),
+            SNAPSHOT_REPOSITORY_UUID_SETTING.exists(indexSettings.getSettings())
+                ? SNAPSHOT_REPOSITORY_UUID_SETTING.get(indexSettings.getSettings())
+                : null
+        );
 
-        final BlobStoreRepository blobStoreRepository = SearchableSnapshots.getSearchableRepository(repository);
+        final BlobStoreRepository initialRepository = repositorySupplier.get();
 
         final IndexId indexId = new IndexId(
             SNAPSHOT_INDEX_NAME_SETTING.get(indexSettings.getSettings()),
@@ -617,14 +599,14 @@
             SNAPSHOT_SNAPSHOT_ID_SETTING.get(indexSettings.getSettings())
         );
 
-        final LazyInitializable<BlobContainer, RuntimeException> lazyBlobContainer = new LazyInitializable<>(
-            () -> new RateLimitingBlobContainer(
-                blobStoreRepository,
-                blobStoreRepository.shardContainer(indexId, shardPath.getShardId().id())
-            )
+        final Supplier<BlobContainer> blobContainerSupplier = new BlobContainerSupplier(
+            repositorySupplier,
+            indexId,
+            shardPath.getShardId().id()
         );
+
         final LazyInitializable<BlobStoreIndexShardSnapshot, RuntimeException> lazySnapshot = new LazyInitializable<>(
-            () -> blobStoreRepository.loadShardSnapshot(lazyBlobContainer.getOrCompute(), snapshotId)
+            () -> repositorySupplier.get().loadShardSnapshot(blobContainerSupplier.get(), snapshotId)
         );
 
         final Path cacheDir = CacheService.getShardCachePath(shardPath).resolve(snapshotId.getUUID());
@@ -632,10 +614,10 @@
 
         return new InMemoryNoOpCommitDirectory(
             new SearchableSnapshotDirectory(
-                lazyBlobContainer::getOrCompute,
+                blobContainerSupplier,
                 lazySnapshot::getOrCompute,
                 blobStoreCacheService,
-                repositoryName,
+                initialRepository.getMetadata().name(),
                 snapshotId,
                 indexId,
                 shardPath.getShardId(),
@@ -690,42 +672,4 @@ public void putCachedBlob(String name, ByteRange range, BytesReference content,
 
     public SharedBlobCacheService.CacheFile getFrozenCacheFile(String fileName, long length) {
         return sharedBlobCacheService.getCacheFile(createCacheKey(fileName), length);
     }
-
-    private static Repository repositoryByUuid(Map<String, Repository> repositories, String repositoryUuid, String originalName) {
-        for (Repository repository : repositories.values()) {
-            if (repository.getMetadata().uuid().equals(repositoryUuid)) {
-                return repository;
-            }
-        }
-        throw new RepositoryMissingException("uuid [" + repositoryUuid + "], original name [" + originalName + "]");
-    }
-
-    /**
-     * A {@link FilterBlobContainer} that uses {@link BlobStoreRepository#maybeRateLimitRestores(InputStream)} to limit the rate at which
-     * blobs are read from the repository.
-     */
-    private static class RateLimitingBlobContainer extends FilterBlobContainer {
-
-        private final BlobStoreRepository blobStoreRepository;
-
-        RateLimitingBlobContainer(BlobStoreRepository blobStoreRepository, BlobContainer blobContainer) {
-            super(blobContainer);
-            this.blobStoreRepository = blobStoreRepository;
-        }
-
-        @Override
-        protected BlobContainer wrapChild(BlobContainer child) {
-            return new RateLimitingBlobContainer(blobStoreRepository, child);
-        }
-
-        @Override
-        public InputStream readBlob(OperationPurpose purpose, String blobName) throws IOException {
-            return blobStoreRepository.maybeRateLimitRestores(super.readBlob(purpose, blobName));
-        }
-
-        @Override
-        public InputStream readBlob(OperationPurpose purpose, String blobName, long position, long length) throws IOException {
-            return blobStoreRepository.maybeRateLimitRestores(super.readBlob(purpose, blobName, position, length));
-        }
-    }
 }
diff --git a/x-pack/plugin/security/cli/build.gradle b/x-pack/plugin/security/cli/build.gradle
index dcf3c7305dbc..8fd3dd29f87a 100644
--- a/x-pack/plugin/security/cli/build.gradle
+++ b/x-pack/plugin/security/cli/build.gradle
@@ -36,7 +36,7 @@ tasks.named("test").configure {
   systemProperty 'tests.security.manager', 'false' // the main code under test runs without the SecurityManager
 }
 
-if (BuildParams.inFipsJvm) {
+if (buildParams.inFipsJvm) {
   tasks.named("test").configure {
     enabled = false
   }
diff --git a/x-pack/plugin/security/lib/nimbus-jose-jwt-modified-part1/build.gradle b/x-pack/plugin/security/lib/nimbus-jose-jwt-modified-part1/build.gradle
index f751fcd0a655..f53ff7027f12 100644
--- a/x-pack/plugin/security/lib/nimbus-jose-jwt-modified-part1/build.gradle
+++ b/x-pack/plugin/security/lib/nimbus-jose-jwt-modified-part1/build.gradle
@@ -6,7 +6,7 @@
  */
 
 apply plugin: 'elasticsearch.build'
-apply plugin: 'com.github.johnrengelman.shadow'
+apply plugin: 'com.gradleup.shadow'
 
 // See the build.gradle file in the parent directory for an explanation of this unusual build
diff --git a/x-pack/plugin/security/lib/nimbus-jose-jwt-modified-part2/build.gradle b/x-pack/plugin/security/lib/nimbus-jose-jwt-modified-part2/build.gradle
index c4c0f2ebd2fe..d24299a3847d 100644
--- a/x-pack/plugin/security/lib/nimbus-jose-jwt-modified-part2/build.gradle
+++ b/x-pack/plugin/security/lib/nimbus-jose-jwt-modified-part2/build.gradle
@@ -6,7 +6,7 @@
  */
 
 apply plugin: 'elasticsearch.build'
-apply plugin: 'com.github.johnrengelman.shadow'
+apply plugin: 'com.gradleup.shadow'
 
 // See the build.gradle file in the parent directory for an explanation of this unusual build
diff --git a/x-pack/plugin/security/lib/nimbus-jose-jwt-modified/build.gradle b/x-pack/plugin/security/lib/nimbus-jose-jwt-modified/build.gradle
index 580ca4505521..4418bd32e64c 100644
--- a/x-pack/plugin/security/lib/nimbus-jose-jwt-modified/build.gradle
+++ b/x-pack/plugin/security/lib/nimbus-jose-jwt-modified/build.gradle
@@ -6,7 +6,7 @@
  */
 
 apply plugin: 'elasticsearch.build'
-apply plugin: 'com.github.johnrengelman.shadow'
+apply plugin: 'com.gradleup.shadow'
 
 // See the build.gradle file in the parent directory for an explanation of this unusual build
diff --git a/x-pack/plugin/security/qa/basic-enable-security/build.gradle b/x-pack/plugin/security/qa/basic-enable-security/build.gradle
index 5957216a3e12..a6930d38d41e 100644
--- a/x-pack/plugin/security/qa/basic-enable-security/build.gradle
+++ b/x-pack/plugin/security/qa/basic-enable-security/build.gradle
@@ -16,7 +16,7 @@ dependencies {
 
 tasks.named("javaRestTest").configure {
   // This test cluster is using a BASIC license and FIPS 140 mode is not supported in BASIC
-  BuildParams.withFipsEnabledOnly(it)
+  buildParams.withFipsEnabledOnly(it)
 
   usesDefaultDistribution()
 }
diff --git a/x-pack/plugin/security/qa/multi-cluster/build.gradle b/x-pack/plugin/security/qa/multi-cluster/build.gradle
index b8eccb14819a..8ee449d39dcc 100644
--- a/x-pack/plugin/security/qa/multi-cluster/build.gradle
+++ b/x-pack/plugin/security/qa/multi-cluster/build.gradle
@@ -35,7 +35,7 @@ tasks.named("javaRestTest") {
   exclude '**/RemoteClusterSecurityBWCToRCS2ClusterRestIT.class'
 }
 
-BuildParams.bwcVersions.withWireCompatible() { bwcVersion, baseName ->
+buildParams.bwcVersions.withWireCompatible() { bwcVersion, baseName ->
   tasks.register(bwcTaskName(bwcVersion), StandaloneRestIntegTestTask) {
     usesBwcDistribution(bwcVersion)
     systemProperty("tests.old_cluster_version", bwcVersion)
diff --git a/x-pack/plugin/security/qa/profile/build.gradle b/x-pack/plugin/security/qa/profile/build.gradle
index ac821e670fde..7465ef991725 100644
--- a/x-pack/plugin/security/qa/profile/build.gradle
+++ b/x-pack/plugin/security/qa/profile/build.gradle
@@ -7,7 +7,7 @@ dependencies {
   javaRestTestImplementation project(':x-pack:plugin:security')
 }
 
-boolean literalUsername = BuildParams.random.nextBoolean()
+boolean literalUsername = buildParams.random.nextBoolean()
 
 tasks.named("javaRestTest").configure {
   usesDefaultDistribution()
diff --git a/x-pack/plugin/security/qa/security-basic/build.gradle b/x-pack/plugin/security/qa/security-basic/build.gradle
index 7684d879671a..30751705bd75 100644
--- a/x-pack/plugin/security/qa/security-basic/build.gradle
+++ b/x-pack/plugin/security/qa/security-basic/build.gradle
@@ -13,7 +13,7 @@ tasks.named('javaRestTest') {
 }
 
 
-if (BuildParams.inFipsJvm){
+if (buildParams.inFipsJvm){
   // This test cluster is using a BASIC license and FIPS 140 mode is not supported in BASIC
   tasks.named("javaRestTest").configure{enabled = false }
 }
diff --git a/x-pack/plugin/security/qa/security-disabled/build.gradle b/x-pack/plugin/security/qa/security-disabled/build.gradle
index eba70753c9f2..0a05eae479d3 100644
--- a/x-pack/plugin/security/qa/security-disabled/build.gradle
+++ b/x-pack/plugin/security/qa/security-disabled/build.gradle
@@ -17,5 +17,5 @@ dependencies {
 tasks.named("javaRestTest").configure {
   usesDefaultDistribution()
   // Test clusters run with security disabled
-  BuildParams.withFipsEnabledOnly(it)
+  buildParams.withFipsEnabledOnly(it)
 }
diff --git a/x-pack/plugin/security/qa/tls-basic/build.gradle b/x-pack/plugin/security/qa/tls-basic/build.gradle
index fbe91009011e..e3b51bde45cc 100644
--- a/x-pack/plugin/security/qa/tls-basic/build.gradle
+++ b/x-pack/plugin/security/qa/tls-basic/build.gradle
@@ -7,7 +7,7 @@ dependencies {
   javaRestTestImplementation(testArtifact(project(xpackModule('core'))))
 }
 
-if (BuildParams.inFipsJvm){
+if (buildParams.inFipsJvm){
   // This test cluster is using a BASIC license and FIPS 140 mode is not supported in BASIC
   tasks.named("javaRestTest").configure{enabled = false }
 }
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java
index 0b387a738a2c..ef66392a8726 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java
@@ -1048,8 +1048,6 @@ Collection createComponents(
             getClock(),
             client,
             systemIndices.getProfileIndexManager(),
-            clusterService,
-            featureService,
             realms
         );
         components.add(profileService);
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityFeatures.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityFeatures.java
index d0292f32cd75..53ecafa28071 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityFeatures.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityFeatures.java
@@ -7,18 +7,14 @@
 
 package org.elasticsearch.xpack.security;
 
-import org.elasticsearch.Version;
 import org.elasticsearch.features.FeatureSpecification;
 import org.elasticsearch.features.NodeFeature;
 
-import java.util.Map;
 import java.util.Set;
 
 import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_MIGRATION_FRAMEWORK;
-import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_PROFILE_ORIGIN_FEATURE;
 import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_ROLES_METADATA_FLATTENED;
 import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_ROLE_MAPPING_CLEANUP;
-import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.VERSION_SECURITY_PROFILE_ORIGIN;
 
 public class SecurityFeatures implements FeatureSpecification {
 
@@ -26,9 +22,4 @@ public class SecurityFeatures implements FeatureSpecification {
     public Set<NodeFeature> getFeatures() {
         return Set.of(SECURITY_ROLE_MAPPING_CLEANUP, SECURITY_ROLES_METADATA_FLATTENED, SECURITY_MIGRATION_FRAMEWORK);
     }
-
-    @Override
-    public Map<NodeFeature, Version> getHistoricalFeatures() {
-        return Map.of(SECURITY_PROFILE_ORIGIN_FEATURE, VERSION_SECURITY_PROFILE_ORIGIN);
-    }
 }
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java
index b347c278aae0..a3ee313c7f1d 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java
@@ -35,7 +35,6 @@
 import org.elasticsearch.action.update.UpdateResponse;
 import org.elasticsearch.client.internal.Client;
 import org.elasticsearch.client.internal.OriginSettingClient;
-import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.BackoffPolicy;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
@@ -45,7 +44,6 @@
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.core.Tuple;
-import org.elasticsearch.features.FeatureService;
 import org.elasticsearch.index.engine.VersionConflictEngineException;
 import org.elasticsearch.index.query.BoolQueryBuilder;
 import org.elasticsearch.index.query.MultiMatchQueryBuilder;
@@ -100,14 +98,12 @@
 import static org.elasticsearch.action.bulk.TransportSingleItemBulkWriteAction.toSingleItemBulkRequest;
 import static org.elasticsearch.common.Strings.collectionToCommaDelimitedString;
 import static org.elasticsearch.core.Strings.format;
-import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_ORIGIN;
 import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_PROFILE_ORIGIN;
 import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin;
 import static org.elasticsearch.xpack.core.security.authc.Authentication.isFileOrNativeRealm;
 import static org.elasticsearch.xpack.security.support.SecurityIndexManager.Availability.PRIMARY_SHARDS;
 import static org.elasticsearch.xpack.security.support.SecurityIndexManager.Availability.SEARCH_SHARDS;
 import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_PROFILE_ALIAS;
-import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_PROFILE_ORIGIN_FEATURE;
 
 public class ProfileService {
     private static final Logger logger = LogManager.getLogger(ProfileService.class);
@@ -120,26 +116,14 @@ public class ProfileService {
     private final Clock clock;
     private final Client client;
     private final SecurityIndexManager profileIndex;
-    private final ClusterService clusterService;
-    private final FeatureService featureService;
     private final Function<String, DomainConfig> domainConfigLookup;
     private final Function<RealmConfig.RealmIdentifier, Authentication.RealmRef> realmRefLookup;
 
-    public ProfileService(
-        Settings settings,
-        Clock clock,
-        Client client,
-        SecurityIndexManager profileIndex,
-        ClusterService clusterService,
-        FeatureService featureService,
-        Realms realms
-    ) {
+    public ProfileService(Settings settings, Clock clock, Client client, SecurityIndexManager profileIndex, Realms realms) {
         this.settings = settings;
         this.clock = clock;
         this.client = client;
         this.profileIndex = profileIndex;
-        this.clusterService = clusterService;
-        this.featureService = featureService;
         this.domainConfigLookup = realms::getDomainConfig;
         this.realmRefLookup = realms::getRealmRef;
     }
@@ -273,7 +257,7 @@ public void suggestProfile(SuggestProfilesRequest request, TaskId parentTaskId,
             listener::onFailure,
             () -> executeAsyncWithOrigin(
                 client,
-                getActionOrigin(),
+                SECURITY_PROFILE_ORIGIN,
                 TransportSearchAction.TYPE,
                 searchRequest,
                 ActionListener.wrap(searchResponse -> {
@@ -403,7 +387,7 @@ public void usageStats(ActionListener<Map<String, Object>> listener) {
             listener::onFailure,
             () -> executeAsyncWithOrigin(
                 client,
-                getActionOrigin(),
+                SECURITY_PROFILE_ORIGIN,
                 TransportMultiSearchAction.TYPE,
                 multiSearchRequest,
                 ActionListener.wrap(multiSearchResponse -> {
@@ -484,7 +468,7 @@ private void getVersionedDocument(String uid, ActionListener<VersionedDocument>
             listener::onFailure,
             () -> executeAsyncWithOrigin(
                 client,
-                getActionOrigin(),
+                SECURITY_PROFILE_ORIGIN,
                 TransportGetAction.TYPE,
                 getRequest,
                 ActionListener.wrap(response -> {
@@ -514,7 +498,7 @@ private void getVersionedDocuments(Collection<String> uids, ActionListener
         frozenProfileIndex.checkIndexVersionThenExecute(
             listener::onFailure,
-            () -> new OriginSettingClient(client, getActionOrigin()).prepareMultiGet()
+            () -> new OriginSettingClient(client, SECURITY_PROFILE_ORIGIN).prepareMultiGet()
                 .addIds(frozenProfileIndex.aliasName(), uids.stream().map(ProfileService::uidToDocId).toArray(String[]::new))
                 .execute(ActionListener.wrap(multiGetResponse -> {
                     List<VersionedDocument> retrievedDocs = new ArrayList<>(multiGetResponse.getResponses().length);
@@ -589,7 +573,7 @@ private void searchVersionedDocumentsForSubjects(
         subjects.forEach(subject -> multiSearchRequest.add(buildSearchRequestForSubject(subject)));
         executeAsyncWithOrigin(
             client,
-            getActionOrigin(),
+            SECURITY_PROFILE_ORIGIN,
             TransportMultiSearchAction.TYPE,
             multiSearchRequest,
             ActionListener.wrap(
@@ -742,7 +726,7 @@ void createNewProfile(Subject subject, String uid, ActionListener<Profile> liste
             listener::onFailure,
             () -> executeAsyncWithOrigin(
                 client,
-                getActionOrigin(),
+                SECURITY_PROFILE_ORIGIN,
                 TransportBulkAction.TYPE,
                 bulkRequest,
                 TransportBulkAction.unwrappingSingleItemBulkResponse(ActionListener.wrap(indexResponse -> {
@@ -1007,7 +991,7 @@ void doUpdate(UpdateRequest updateRequest, ActionListener<UpdateResponse> listen
             listener::onFailure,
             () -> executeAsyncWithOrigin(
                 client,
-                getActionOrigin(),
+                SECURITY_PROFILE_ORIGIN,
                 TransportUpdateAction.TYPE,
                 updateRequest,
                 ActionListener.wrap(updateResponse -> {
@@ -1019,15 +1003,6 @@ void doUpdate(UpdateRequest updateRequest, ActionListener<UpdateResponse> listen
         );
     }
 
-    private String getActionOrigin() {
-        // profile origin and user is not available before v8.3.0
-        if (featureService.clusterHasFeature(clusterService.state(), SECURITY_PROFILE_ORIGIN_FEATURE)) {
-            return SECURITY_PROFILE_ORIGIN;
-        } else {
-            return SECURITY_ORIGIN;
-        }
-    }
-
     private static String uidToDocId(String uid) {
         return DOC_ID_PREFIX + uid;
     }
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java
index 609e6696bcb0..7b3f6a8d2ae5 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java
@@ -9,7 +9,6 @@
 
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
-import org.elasticsearch.Version;
 import org.elasticsearch.client.internal.Client;
 import org.elasticsearch.cluster.metadata.IndexMetadata;
 import org.elasticsearch.cluster.node.DiscoveryNode;
@@ -57,8 +56,6 @@ public class SecuritySystemIndices {
     public static final String INTERNAL_SECURITY_PROFILE_INDEX_8 = ".security-profile-8";
     public static final String SECURITY_PROFILE_ALIAS = ".security-profile";
 
-    public static final Version VERSION_SECURITY_PROFILE_ORIGIN = Version.V_8_3_0;
-    public static final NodeFeature SECURITY_PROFILE_ORIGIN_FEATURE = new NodeFeature("security.security_profile_origin");
     public static final NodeFeature SECURITY_MIGRATION_FRAMEWORK = new NodeFeature("security.migration_framework");
     public static final NodeFeature SECURITY_ROLES_METADATA_FLATTENED = new NodeFeature("security.roles_metadata_flattened");
     public static final NodeFeature SECURITY_ROLE_MAPPING_CLEANUP = new NodeFeature("security.role_mapping_cleanup");
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/token/TransportInvalidateTokenActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/token/TransportInvalidateTokenActionTests.java
index 6b9594c1c68e..87651a96d75a 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/token/TransportInvalidateTokenActionTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/token/TransportInvalidateTokenActionTests.java
@@ -82,8 +82,9 @@ public void testInvalidateTokensWhenIndexUnavailable() throws Exception {
         when(securityIndex.isAvailable(SecurityIndexManager.Availability.SEARCH_SHARDS)).thenReturn(false);
         when(securityIndex.indexExists()).thenReturn(true);
         when(securityIndex.defensiveCopy()).thenReturn(securityIndex);
-        when(securityIndex.getUnavailableReason(SecurityIndexManager.Availability.PRIMARY_SHARDS))
-            .thenReturn(new ElasticsearchException("simulated"));
+        when(securityIndex.getUnavailableReason(SecurityIndexManager.Availability.PRIMARY_SHARDS)).thenReturn(
+            new ElasticsearchException("simulated")
+        );
         final TokenService tokenService = new TokenService(
             SETTINGS,
             Clock.systemUTC(),
@@ -102,8 +103,11 @@ public void testInvalidateTokensWhenIndexUnavailable() throws Exception {
         Tuple<byte[], byte[]> newTokenBytes = tokenService.getRandomTokenBytes(true);
         InvalidateTokenRequest request = new InvalidateTokenRequest(
-            tokenService.prependVersionAndEncodeAccessToken(TransportVersion.current(), newTokenBytes.v1()),
-            ACCESS_TOKEN.getValue(), null, null);
+            tokenService.prependVersionAndEncodeAccessToken(TransportVersion.current(), newTokenBytes.v1()),
+            ACCESS_TOKEN.getValue(),
+            null,
+            null
+        );
         PlainActionFuture accessTokenfuture = new PlainActionFuture<>();
         action.doExecute(null, request, accessTokenfuture);
         ElasticsearchSecurityException ese = expectThrows(ElasticsearchSecurityException.class, accessTokenfuture::actionGet);
@@ -148,8 +152,11 @@ public void testInvalidateTokensWhenIndexClosed() throws Exception {
         Tuple<byte[], byte[]> newTokenBytes = tokenService.getRandomTokenBytes(true);
         InvalidateTokenRequest request = new InvalidateTokenRequest(
-            tokenService.prependVersionAndEncodeAccessToken(TransportVersion.current(), newTokenBytes.v1()),
-            ACCESS_TOKEN.getValue(), null, null);
+            tokenService.prependVersionAndEncodeAccessToken(TransportVersion.current(), newTokenBytes.v1()),
+            ACCESS_TOKEN.getValue(),
+            null,
+            null
+        );
         PlainActionFuture accessTokenfuture = new PlainActionFuture<>();
         action.doExecute(null, request, accessTokenfuture);
         ElasticsearchSecurityException ese = expectThrows(ElasticsearchSecurityException.class, accessTokenfuture::actionGet);
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java
index cd6c88cf525a..7b66a95609b0 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java
@@ -2041,8 +2041,14 @@ public void testExpiredToken() throws Exception {
                 .user(new User("creator"))
                 .realmRef(new RealmRef("test", "test", "test"))
                 .build(false);
-            tokenService.createOAuth2Tokens(newTokenBytes.v1(), newTokenBytes.v2(), expected, originatingAuth, Collections.emptyMap(),
-                tokenFuture);
+            tokenService.createOAuth2Tokens(
+                newTokenBytes.v1(),
+                newTokenBytes.v2(),
+                expected,
+                originatingAuth,
+                Collections.emptyMap(),
+                tokenFuture
+            );
         }
         String token = tokenFuture.get().getAccessToken();
         mockGetTokenFromAccessTokenBytes(tokenService, newTokenBytes.v1(), expected, Map.of(), true, null, client);
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessAuthenticationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessAuthenticationServiceTests.java
index 7219561dcf9d..aed39b24f217 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessAuthenticationServiceTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessAuthenticationServiceTests.java
@@ -76,11 +76,13 @@ public void init() throws Exception {
     }
 
     public void testAuthenticateThrowsOnUnsupportedMinVersions() throws IOException {
-        when(clusterService.state().getMinTransportVersion()).thenReturn(TransportVersionUtils.randomVersionBetween(
+        when(clusterService.state().getMinTransportVersion()).thenReturn(
+            TransportVersionUtils.randomVersionBetween(
                 random(),
                 TransportVersions.MINIMUM_COMPATIBLE,
                 TransportVersionUtils.getPreviousVersion(TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY)
-        ));
+            )
+        );
         final var authcContext = mock(Authenticator.Context.class, Mockito.RETURNS_DEEP_STUBS);
         when(authcContext.getThreadContext()).thenReturn(threadContext);
         final var crossClusterAccessHeaders = new CrossClusterAccessHeaders(
@@ -93,12 +95,14 @@
         when(auditableRequest.exceptionProcessingRequest(any(), any())).thenAnswer(
             i -> new ElasticsearchSecurityException("potato", (Exception) i.getArguments()[0])
         );
-        doAnswer(invocationOnMock -> new Authenticator.Context(
+        doAnswer(
+            invocationOnMock -> new Authenticator.Context(
                 threadContext,
                 auditableRequest,
                 mock(Realms.class),
                 (AuthenticationToken) invocationOnMock.getArguments()[2]
-        )).when(authenticationService).newContext(anyString(), any(), any());
+            )
+        ).when(authenticationService).newContext(anyString(), any(), any());
 
         final PlainActionFuture<Authentication> future = new PlainActionFuture<>();
         crossClusterAccessAuthenticationService.authenticate("action", mock(TransportRequest.class), future);
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RealmsAuthenticatorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RealmsAuthenticatorTests.java
index b35a2f8ccc4d..02f397c23d3b 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RealmsAuthenticatorTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RealmsAuthenticatorTests.java
@@ -326,7 +326,7 @@ public void testRecordingFailedAuthenticationMetric() {
             @SuppressWarnings("unchecked")
             final ActionListener<AuthenticationResult<Authentication>> listener = (ActionListener<AuthenticationResult<Authentication>>) invocationOnMock
                 .getArguments()[1];
-            listener.onResponse(AuthenticationResult.unsuccessful("unsuccessful realms authentication", null));
+            listener.onResponse(AuthenticationResult.unsuccessful("unsuccessful realms authentication", null));
             return null;
         }).when(unsuccessfulRealm).authenticate(eq(authenticationToken), any());
 
@@ -337,7 +337,7 @@ public void testRecordingFailedAuthenticationMetric() {
         final PlainActionFuture<AuthenticationResult<Authentication>> future = new PlainActionFuture<>();
         realmsAuthenticator.authenticate(context, future);
-        var e = expectThrows(ElasticsearchSecurityException.class, () -> future.actionGet());
+        var e = expectThrows(ElasticsearchSecurityException.class, () -> future.actionGet());
 
         assertThat(e, sameInstance(exception));
         assertSingleFailedAuthMetric(
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java
index 73a45dc20ac4..ed3949450cb9 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java
@@ -363,9 +363,7 @@ public void testGetPrivilegesWillOnlyWaitOnUnavailableShardException() {
 
     public void testGetPrivilegesFailsAfterWaitOnUnavailableShardException() {
         when(securityIndex.isAvailable(SecurityIndexManager.Availability.SEARCH_SHARDS)).thenReturn(false).thenReturn(false);
-        when(securityIndex.getUnavailableReason(SecurityIndexManager.Availability.SEARCH_SHARDS)).thenReturn(
-            unavailableShardsException()
-        );
+        when(securityIndex.getUnavailableReason(SecurityIndexManager.Availability.SEARCH_SHARDS)).thenReturn(unavailableShardsException());
         doAnswer(invocation -> {
             @SuppressWarnings("unchecked")
             final var listener = (ActionListener) invocation.getArguments()[0];
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java
index f076dc24e5d5..6da1ddb61f11 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java
@@ -36,9 +36,6 @@
 import org.elasticsearch.action.update.UpdateRequestBuilder;
 import org.elasticsearch.action.update.UpdateResponse;
 import org.elasticsearch.client.internal.Client;
-import org.elasticsearch.cluster.ClusterState;
-import org.elasticsearch.cluster.node.DiscoveryNodes;
-import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
@@ -48,7 +45,6 @@
 import org.elasticsearch.common.util.concurrent.EsExecutors;
 import org.elasticsearch.common.util.set.Sets;
 import org.elasticsearch.core.Tuple;
-import org.elasticsearch.features.FeatureService;
 import org.elasticsearch.index.engine.VersionConflictEngineException;
 import org.elasticsearch.index.get.GetResult;
 import org.elasticsearch.index.query.BoolQueryBuilder;
@@ -88,7 +84,6 @@
 import org.elasticsearch.xpack.security.authc.Realms;
 import org.elasticsearch.xpack.security.profile.ProfileDocument.ProfileDocumentUser;
 import org.elasticsearch.xpack.security.support.SecurityIndexManager;
-import org.elasticsearch.xpack.security.support.SecuritySystemIndices;
 import org.elasticsearch.xpack.security.test.SecurityMocks;
 import org.hamcrest.Matchers;
 import org.junit.After;
@@ -115,7 +110,6 @@
 import static java.util.Collections.emptyMap;
 import static org.elasticsearch.common.util.concurrent.ThreadContext.ACTION_ORIGIN_TRANSIENT_NAME;
 import static org.elasticsearch.test.ActionListenerUtils.anyActionListener;
-import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_ORIGIN;
 import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_PROFILE_ORIGIN;
 import static org.elasticsearch.xpack.core.security.support.Validation.VALID_NAME_CHARS;
 import static org.elasticsearch.xpack.security.Security.SECURITY_CRYPTO_THREAD_POOL_NAME;
@@ -187,7 +181,6 @@ public class ProfileServiceTests extends ESTestCase {
     private SecurityIndexManager profileIndex;
     private ProfileService profileService;
     Function<RealmConfig.RealmIdentifier, Authentication.RealmRef> realmRefLookup;
-    private boolean useProfileOrigin;
 
     @Before
     public void prepare() {
@@ -208,29 +201,11 @@ public void prepare() {
         when(client.threadPool()).thenReturn(threadPool);
         when(client.prepareSearch(SECURITY_PROFILE_ALIAS)).thenReturn(new SearchRequestBuilder(client).setIndices(SECURITY_PROFILE_ALIAS));
         this.profileIndex = SecurityMocks.mockSecurityIndexManager(SECURITY_PROFILE_ALIAS);
-        final ClusterService clusterService = mock(ClusterService.class);
-        final ClusterState clusterState = mock(ClusterState.class);
-        when(clusterService.state()).thenReturn(clusterState);
-        final DiscoveryNodes discoveryNodes = mock(DiscoveryNodes.class);
-        when(clusterState.nodes()).thenReturn(discoveryNodes);
-        useProfileOrigin = randomBoolean();
-        FeatureService featureService = mock(FeatureService.class);
-        when(featureService.clusterHasFeature(any(), eq(SecuritySystemIndices.SECURITY_PROFILE_ORIGIN_FEATURE))).thenReturn(
-            useProfileOrigin
-        );
         realmRefLookup = realmIdentifier -> null;
         Realms realms = mock(Realms.class);
         when(realms.getDomainConfig(anyString())).then(args -> new DomainConfig(args.getArgument(0), Set.of(), false, null));
         when(realms.getRealmRef(any(RealmConfig.RealmIdentifier.class))).then(args -> realmRefLookup.apply(args.getArgument(0)));
-        this.profileService = new ProfileService(
-            Settings.EMPTY,
-            Clock.systemUTC(),
-            client,
-            profileIndex,
-            clusterService,
-            featureService,
-            realms
-        );
+        this.profileService = new ProfileService(Settings.EMPTY, Clock.systemUTC(), client, profileIndex, realms);
     }
 
     @After
@@ -331,10 +306,7 @@ public void testGetProfileSubjectsWithMissingUids() throws Exception {
         final Collection<String> allProfileUids = randomList(1, 5, () -> randomAlphaOfLength(20));
         final Collection<String> missingProfileUids = randomSubsetOf(allProfileUids);
         doAnswer(invocation -> {
-            assertThat(
-                threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME),
-                equalTo(useProfileOrigin ? SECURITY_PROFILE_ORIGIN : SECURITY_ORIGIN)
-            );
+            assertThat(threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), equalTo(SECURITY_PROFILE_ORIGIN));
             final MultiGetRequest multiGetRequest = (MultiGetRequest) invocation.getArguments()[1];
             List<MultiGetItemResponse> responses = new ArrayList<>();
             for (MultiGetRequest.Item item : multiGetRequest.getItems()) {
@@ -397,10 +369,7 @@ public void testGetProfileSubjectsWithMissingUids() throws Exception {
 
     public void testGetProfileSubjectWithFailures() throws Exception {
         final ElasticsearchException mGetException = new ElasticsearchException("mget Exception");
         doAnswer(invocation -> {
-            assertThat(
-                threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME),
-                equalTo(useProfileOrigin ? SECURITY_PROFILE_ORIGIN : SECURITY_ORIGIN)
-            );
+            assertThat(threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), equalTo(SECURITY_PROFILE_ORIGIN));
             final ActionListener listener = (ActionListener) invocation.getArguments()[2];
             listener.onFailure(mGetException);
             return null;
@@ -413,10 +382,7 @@
         final Collection<String> errorProfileUids = randomSubsetOf(allProfileUids);
         final Collection<String> missingProfileUids = Sets.difference(Set.copyOf(allProfileUids), Set.copyOf(errorProfileUids));
         doAnswer(invocation -> {
-            assertThat(
-                threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME),
-                equalTo(useProfileOrigin ? SECURITY_PROFILE_ORIGIN : SECURITY_ORIGIN)
-            );
+            assertThat(threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), equalTo(SECURITY_PROFILE_ORIGIN));
             final MultiGetRequest multiGetRequest = (MultiGetRequest) invocation.getArguments()[1];
             List<MultiGetItemResponse> responses = new ArrayList<>();
             for (MultiGetRequest.Item item : multiGetRequest.getItems()) {
@@ -504,15 +470,7 @@ public void testLiteralUsernameWillThrowOnDuplicate() throws IOException {
         final Subject subject = new Subject(AuthenticationTestHelper.randomUser(), AuthenticationTestHelper.randomRealmRef(true));
         Realms realms = mock(Realms.class);
         when(realms.getDomainConfig(anyString())).then(args -> new DomainConfig(args.getArgument(0), Set.of(), true, "suffix"));
-        final ProfileService service = new ProfileService(
-            Settings.EMPTY,
-            Clock.systemUTC(),
-            client,
-            profileIndex,
-            mock(ClusterService.class),
-            mock(FeatureService.class),
-            realms
-        );
+        final ProfileService service = new ProfileService(Settings.EMPTY, Clock.systemUTC(), client, profileIndex, realms);
         final PlainActionFuture future = new PlainActionFuture<>();
         service.maybeIncrementDifferentiatorAndCreateNewProfile(
             subject,
@@ -593,10 +551,7 @@ public void testBuildSearchRequest() {
 
     public void testSecurityProfileOrigin() {
         // Activate profile
         doAnswer(invocation -> {
-            assertThat(
-                threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME),
-                equalTo(useProfileOrigin ? SECURITY_PROFILE_ORIGIN : SECURITY_ORIGIN)
-            );
+            assertThat(threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), equalTo(SECURITY_PROFILE_ORIGIN));
             @SuppressWarnings("unchecked")
             final ActionListener listener = (ActionListener) invocation.getArguments()[2];
             var resp = new MultiSearchResponse(
@@ -616,10 +571,7 @@ public void testSecurityProfileOrigin() {
 
         final RuntimeException expectedException = new RuntimeException("expected");
         doAnswer(invocation -> {
-            assertThat(
-                threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME),
-                equalTo(useProfileOrigin ? SECURITY_PROFILE_ORIGIN : SECURITY_ORIGIN)
-            );
+            assertThat(threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), equalTo(SECURITY_PROFILE_ORIGIN));
             final ActionListener listener = (ActionListener) invocation.getArguments()[2];
             listener.onFailure(expectedException);
             return null;
@@ -632,10 +584,7 @@ public void testSecurityProfileOrigin() {
 
         // Update
         doAnswer(invocation -> {
-            assertThat(
-                threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME),
-                equalTo(useProfileOrigin ? SECURITY_PROFILE_ORIGIN : SECURITY_ORIGIN)
-            );
+            assertThat(threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), equalTo(SECURITY_PROFILE_ORIGIN));
             final ActionListener listener = (ActionListener) invocation.getArguments()[2];
             listener.onFailure(expectedException);
             return null;
@@ -647,10 +596,7 @@ public void testSecurityProfileOrigin() {
 
         // Suggest
         doAnswer(invocation -> {
-            assertThat(
-                threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME),
-                equalTo(useProfileOrigin ? SECURITY_PROFILE_ORIGIN : SECURITY_ORIGIN)
-            );
+            assertThat(threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), equalTo(SECURITY_PROFILE_ORIGIN));
             final ActionListener listener = (ActionListener) invocation.getArguments()[2];
             listener.onFailure(expectedException);
             return null;
@@ -675,17 +621,7 @@ public void testActivateProfileWithDifferentUidFormats() throws IOException {
                 return new DomainConfig(domainName, Set.of(), true, "suffix");
             }
         });
-        final ProfileService service = spy(
-            new ProfileService(
-                Settings.EMPTY,
-                Clock.systemUTC(),
-                client,
-                profileIndex,
-                mock(ClusterService.class),
-                mock(FeatureService.class),
-                realms
-            )
-        );
+        final ProfileService service = spy(new ProfileService(Settings.EMPTY, Clock.systemUTC(), client, profileIndex, realms));
 
         doAnswer(invocation -> {
             @SuppressWarnings("unchecked")
@@ -1098,10 +1034,7 @@ public void testProfileSearchForApiKeyOwnerWithoutDomain() throws Exception {
         MultiSearchResponse emptyMultiSearchResponse = new MultiSearchResponse(responseItems, randomNonNegativeLong());
         try {
             doAnswer(invocation -> {
-                assertThat(
-                    threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME),
-                    equalTo(useProfileOrigin ? SECURITY_PROFILE_ORIGIN : SECURITY_ORIGIN)
-                );
+                assertThat(threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), equalTo(SECURITY_PROFILE_ORIGIN));
                 MultiSearchRequest multiSearchRequest = (MultiSearchRequest) invocation.getArguments()[1];
                 assertThat(multiSearchRequest.requests(), iterableWithSize(1));
                 assertThat(multiSearchRequest.requests().get(0).source().query(), instanceOf(BoolQueryBuilder.class));
@@ -1153,10 +1086,7 @@ public void testProfileSearchForApiKeyOwnerWithDomain() throws Exception {
         MultiSearchResponse emptyMultiSearchResponse = new MultiSearchResponse(responseItems, randomNonNegativeLong());
         try {
             doAnswer(invocation -> {
-                assertThat(
-                    threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME),
-                    equalTo(useProfileOrigin ? SECURITY_PROFILE_ORIGIN : SECURITY_ORIGIN)
-                );
+                assertThat(threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), equalTo(SECURITY_PROFILE_ORIGIN));
                 MultiSearchRequest multiSearchRequest = (MultiSearchRequest) invocation.getArguments()[1];
                 assertThat(multiSearchRequest.requests(), iterableWithSize(1));
                 assertThat(multiSearchRequest.requests().get(0).source().query(), instanceOf(BoolQueryBuilder.class));
@@ -1218,10 +1148,7 @@ public void testProfileSearchForOwnerOfMultipleApiKeys() throws Exception {
         MultiSearchResponse emptyMultiSearchResponse = new MultiSearchResponse(responseItems, randomNonNegativeLong());
         try {
             doAnswer(invocation -> {
-                assertThat(
-                    threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME),
-                    equalTo(useProfileOrigin ? SECURITY_PROFILE_ORIGIN : SECURITY_ORIGIN)
-                );
+                assertThat(threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), equalTo(SECURITY_PROFILE_ORIGIN));
                 MultiSearchRequest multiSearchRequest = (MultiSearchRequest) invocation.getArguments()[1];
                 // a single search request for a single owner of multiple keys
                 assertThat(multiSearchRequest.requests(), iterableWithSize(1));
@@ -1277,10 +1204,7 @@ public void testProfileSearchErrorForApiKeyOwner() {
         MultiSearchResponse multiSearchResponseWithError = new MultiSearchResponse(responseItems, randomNonNegativeLong());
         try {
             doAnswer(invocation -> {
-                assertThat(
-                    threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME),
-                    equalTo(useProfileOrigin ? SECURITY_PROFILE_ORIGIN : SECURITY_ORIGIN)
-                );
+                assertThat(threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), equalTo(SECURITY_PROFILE_ORIGIN));
                 // a single search request for a single owner of multiple keys
                 MultiSearchRequest multiSearchRequest = (MultiSearchRequest) invocation.getArguments()[1];
                 // 2 search requests for the 2 Api key owners
@@ -1402,10 +1326,7 @@ private void mockMultiGetRequest(List<SampleDocumentParameter> sampleDocumentPar
 
     private void mockMultiGetRequest(List<SampleDocumentParameter> sampleDocumentParameters, Map<String, Exception> errors) {
         doAnswer(invocation -> {
-            assertThat(
-                threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME),
-                equalTo(useProfileOrigin ? SECURITY_PROFILE_ORIGIN : SECURITY_ORIGIN)
-            );
+            assertThat(threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), equalTo(SECURITY_PROFILE_ORIGIN));
             final MultiGetRequest multiGetRequest = (MultiGetRequest) invocation.getArguments()[1];
             @SuppressWarnings("unchecked")
             final ActionListener listener = (ActionListener) invocation.getArguments()[2];
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestUpdateCrossClusterApiKeyActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestUpdateCrossClusterApiKeyActionTests.java
index 879e1ac8ad15..6c71f30243ea 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestUpdateCrossClusterApiKeyActionTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestUpdateCrossClusterApiKeyActionTests.java
@@ -89,13 +89,10 @@ public void testLicenseEnforcement() throws Exception {
 
         // Disallow by license
         when(licenseState.isAllowed(Security.ADVANCED_REMOTE_CLUSTER_SECURITY_FEATURE)).thenReturn(false);
 
-        final FakeRestRequest restRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withContent(
-            new BytesArray("""
-                {
-                  "metadata": {}
-                }"""),
-            XContentType.JSON
-        ).withParams(Map.of("id", randomAlphaOfLength(10))).build();
+        final FakeRestRequest restRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withContent(new BytesArray("""
+            {
+              "metadata": {}
+            }"""), XContentType.JSON).withParams(Map.of("id", randomAlphaOfLength(10))).build();
         final SetOnce<RestResponse> responseSetOnce = new SetOnce<>();
         final RestChannel restChannel = new AbstractRestChannel(restRequest, true) {
             @Override
diff --git a/x-pack/plugin/shutdown/qa/full-cluster-restart/build.gradle b/x-pack/plugin/shutdown/qa/full-cluster-restart/build.gradle
index 45bca8860049..515ffca4a59b 100644
--- a/x-pack/plugin/shutdown/qa/full-cluster-restart/build.gradle
+++ b/x-pack/plugin/shutdown/qa/full-cluster-restart/build.gradle
@@ -13,7 +13,7 @@ dependencies {
   javaRestTestImplementation project(':x-pack:qa')
 }
 
-BuildParams.bwcVersions.withIndexCompatible { bwcVersion, baseName ->
+buildParams.bwcVersions.withIndexCompatible { bwcVersion, baseName ->
  tasks.register(bwcTaskName(bwcVersion), StandaloneRestIntegTestTask) {
    usesBwcDistribution(bwcVersion)
    systemProperty("tests.old_cluster_version", bwcVersion)
diff --git a/x-pack/plugin/shutdown/qa/rolling-upgrade/build.gradle b/x-pack/plugin/shutdown/qa/rolling-upgrade/build.gradle
index 32cab39f665d..4c98276abe15 100644
--- a/x-pack/plugin/shutdown/qa/rolling-upgrade/build.gradle
+++ b/x-pack/plugin/shutdown/qa/rolling-upgrade/build.gradle
@@ -36,7 +36,7 @@ tasks.register("copyTestNodeKeyMaterial", Copy) {
   into outputDir
 }
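+// Note: lower-case "buildParams" below refers to the build-parameters extension that replaces the
+// static BuildParams accessor removed throughout this change.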
-BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> +buildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> String oldVersion = bwcVersion.toString() // SearchableSnapshotsRollingUpgradeIT uses a specific repository to not interfere with other tests diff --git a/x-pack/plugin/slm/qa/multi-node/build.gradle b/x-pack/plugin/slm/qa/multi-node/build.gradle index 1f4b0c3b10c3..d6b1fe8a1e21 100644 --- a/x-pack/plugin/slm/qa/multi-node/build.gradle +++ b/x-pack/plugin/slm/qa/multi-node/build.gradle @@ -31,7 +31,7 @@ testClusters.configureEach { setting 'logger.org.elasticsearch.xpack.slm', 'TRACE' } -if (BuildParams.inFipsJvm){ +if (buildParams.inFipsJvm){ // Test clusters run with security disabled tasks.named("javaRestTest").configure{enabled = false } } diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycle.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycle.java index 192807d667ab..cc01d5b10110 100644 --- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycle.java +++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycle.java @@ -130,7 +130,6 @@ public Collection createComponents(PluginServices services) { SnapshotLifecycleTemplateRegistry templateRegistry = new SnapshotLifecycleTemplateRegistry( settings, clusterService, - services.featureService(), threadPool, client, services.xContentRegistry() diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycleFeatures.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycleFeatures.java index 96b962f70a1b..274dec75865a 100644 --- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycleFeatures.java +++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycleFeatures.java @@ -7,12 +7,9 @@ package org.elasticsearch.xpack.slm; -import org.elasticsearch.Version; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; -import org.elasticsearch.xpack.slm.history.SnapshotLifecycleTemplateRegistry; -import java.util.Map; import java.util.Set; public class SnapshotLifecycleFeatures implements FeatureSpecification { @@ -20,9 +17,4 @@ public class SnapshotLifecycleFeatures implements FeatureSpecification { public Set getFeatures() { return Set.of(SnapshotLifecycleService.INTERVAL_SCHEDULE); } - - @Override - public Map getHistoricalFeatures() { - return Map.of(SnapshotLifecycleTemplateRegistry.MANAGED_BY_DATA_STREAM_LIFECYCLE, Version.V_8_12_0); - } } diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/history/SnapshotLifecycleTemplateRegistry.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/history/SnapshotLifecycleTemplateRegistry.java index f40ea5a56463..31c624df6781 100644 --- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/history/SnapshotLifecycleTemplateRegistry.java +++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/history/SnapshotLifecycleTemplateRegistry.java @@ -8,13 +8,10 @@ package org.elasticsearch.xpack.slm.history; import org.elasticsearch.client.internal.Client; -import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; -import 
org.elasticsearch.features.FeatureService; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.ilm.IndexLifecycleMetadata; @@ -47,13 +44,11 @@ public class SnapshotLifecycleTemplateRegistry extends IndexTemplateRegistry { // version 6: manage by data stream lifecycle // version 7: version the index template name so we can upgrade existing deployments public static final int INDEX_TEMPLATE_VERSION = 7; - public static final NodeFeature MANAGED_BY_DATA_STREAM_LIFECYCLE = new NodeFeature("slm-history-managed-by-dsl"); public static final String SLM_TEMPLATE_VERSION_VARIABLE = "xpack.slm.template.version"; public static final String SLM_TEMPLATE_NAME = ".slm-history-" + INDEX_TEMPLATE_VERSION; public static final String SLM_POLICY_NAME = "slm-history-ilm-policy"; - private final FeatureService featureService; @Override protected boolean requiresMasterNode() { @@ -65,13 +60,11 @@ protected boolean requiresMasterNode() { public SnapshotLifecycleTemplateRegistry( Settings nodeSettings, ClusterService clusterService, - FeatureService featureService, ThreadPool threadPool, Client client, NamedXContentRegistry xContentRegistry ) { super(nodeSettings, clusterService, threadPool, client, xContentRegistry); - this.featureService = featureService; slmHistoryEnabled = SLM_HISTORY_INDEX_ENABLED_SETTING.get(nodeSettings); } @@ -122,9 +115,4 @@ public boolean validate(ClusterState state) { boolean allPoliciesPresent = maybePolicies.map(policies -> policies.keySet().containsAll(policyNames)).orElse(false); return allTemplatesPresent && allPoliciesPresent; } - - @Override - protected boolean isClusterReady(ClusterChangedEvent event) { - return featureService.clusterHasFeature(event.state(), MANAGED_BY_DATA_STREAM_LIFECYCLE); - } } diff --git a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/history/SnapshotLifecycleTemplateRegistryTests.java b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/history/SnapshotLifecycleTemplateRegistryTests.java index d5a8faea1c0a..8f25a4e70388 100644 --- a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/history/SnapshotLifecycleTemplateRegistryTests.java +++ b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/history/SnapshotLifecycleTemplateRegistryTests.java @@ -26,7 +26,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.TriFunction; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.client.NoOpClient; @@ -48,7 +47,6 @@ import org.elasticsearch.xpack.core.ilm.TimeseriesLifecycleType; import org.elasticsearch.xpack.core.ilm.action.ILMActions; import org.elasticsearch.xpack.core.ilm.action.PutLifecycleRequest; -import org.elasticsearch.xpack.slm.SnapshotLifecycleFeatures; import org.junit.After; import org.junit.Before; @@ -102,14 +100,7 @@ public void createRegistryAndClient() { ) ); xContentRegistry = new NamedXContentRegistry(entries); - registry = new SnapshotLifecycleTemplateRegistry( - Settings.EMPTY, - clusterService, - new FeatureService(List.of(new SnapshotLifecycleFeatures())), - threadPool, - client, - xContentRegistry - ); + registry = new SnapshotLifecycleTemplateRegistry(Settings.EMPTY, clusterService, threadPool, client, xContentRegistry); } @After @@ -124,7 +115,6 @@ 
public void testDisabledDoesNotAddTemplates() { SnapshotLifecycleTemplateRegistry disabledRegistry = new SnapshotLifecycleTemplateRegistry( settings, clusterService, - new FeatureService(List.of(new SnapshotLifecycleFeatures())), threadPool, client, xContentRegistry diff --git a/x-pack/plugin/snapshot-based-recoveries/qa/azure/build.gradle b/x-pack/plugin/snapshot-based-recoveries/qa/azure/build.gradle index 03426bdddce6..cb2831f0cf27 100644 --- a/x-pack/plugin/snapshot-based-recoveries/qa/azure/build.gradle +++ b/x-pack/plugin/snapshot-based-recoveries/qa/azure/build.gradle @@ -39,7 +39,7 @@ tasks.named("javaRestTest").configure { systemProperty 'test.azure.container', azureContainer systemProperty 'test.azure.key', azureKey systemProperty 'test.azure.sas_token', azureSasToken - nonInputProperties.systemProperty 'test.azure.base_path', azureBasePath + "_snapshot_based_recoveries_tests_" + BuildParams.testSeed + nonInputProperties.systemProperty 'test.azure.base_path', azureBasePath + "_snapshot_based_recoveries_tests_" + buildParams.testSeed } tasks.register("azureThirdPartyTest") { diff --git a/x-pack/plugin/snapshot-based-recoveries/qa/gcs/build.gradle b/x-pack/plugin/snapshot-based-recoveries/qa/gcs/build.gradle index 267ed84aa45d..7550ab8585e1 100644 --- a/x-pack/plugin/snapshot-based-recoveries/qa/gcs/build.gradle +++ b/x-pack/plugin/snapshot-based-recoveries/qa/gcs/build.gradle @@ -28,7 +28,7 @@ if (!gcsServiceAccount && !gcsBucket && !gcsBasePath) { tasks.named("javaRestTest").configure { systemProperty 'test.google.fixture', Boolean.toString(useFixture) systemProperty 'test.gcs.bucket', gcsBucket - nonInputProperties.systemProperty 'test.gcs.base_path', gcsBasePath + "_snapshot_based_recoveries_tests" + BuildParams.testSeed + nonInputProperties.systemProperty 'test.gcs.base_path', gcsBasePath + "_snapshot_based_recoveries_tests" + buildParams.testSeed if (useFixture == false) { systemProperty 'test.google.account', serviceAccountFile diff --git a/x-pack/plugin/snapshot-based-recoveries/qa/s3/build.gradle b/x-pack/plugin/snapshot-based-recoveries/qa/s3/build.gradle index b669641363bd..e676e1f1f216 100644 --- a/x-pack/plugin/snapshot-based-recoveries/qa/s3/build.gradle +++ b/x-pack/plugin/snapshot-based-recoveries/qa/s3/build.gradle @@ -47,7 +47,7 @@ tasks.named("javaRestTest").configure { systemProperty("s3AccessKey", s3AccessKey) systemProperty("s3SecretKey", s3SecretKey) nonInputProperties.systemProperty 'test.s3.base_path', - s3BasePath ? s3BasePath + "_snapshot_based_recoveries_tests" + BuildParams.testSeed : 'base_path_integration_tests' + s3BasePath ? 
s3BasePath + "_snapshot_based_recoveries_tests" + buildParams.testSeed : 'base_path_integration_tests' } tasks.register("s3ThirdPartyTest") { diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/azure/build.gradle b/x-pack/plugin/snapshot-repo-test-kit/qa/azure/build.gradle index e304b2ff5c26..af4ed719a9c2 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/qa/azure/build.gradle +++ b/x-pack/plugin/snapshot-repo-test-kit/qa/azure/build.gradle @@ -46,7 +46,7 @@ tasks.named("javaRestTest") { systemProperty 'test.azure.sas_token', azureSasToken systemProperty 'test.azure.tenant_id', azureTenantId systemProperty 'test.azure.client_id', azureClientId - nonInputProperties.systemProperty 'test.azure.base_path', azureBasePath + "_repository_test_kit_tests_" + BuildParams.testSeed + nonInputProperties.systemProperty 'test.azure.base_path', azureBasePath + "_repository_test_kit_tests_" + buildParams.testSeed } tasks.register("azureThirdPartyTest") { @@ -61,7 +61,7 @@ tasks.register("managedIdentityJavaRestTest", RestIntegTestTask) { systemProperty 'test.azure.container', azureContainer // omitting key and sas_token so that we use a bearer token from the metadata service // omitting client_id and tenant_id so that we use a bearer token from the metadata service, not from workload identity - nonInputProperties.systemProperty 'test.azure.base_path', azureBasePath + "_repository_test_kit_tests_" + BuildParams.testSeed + nonInputProperties.systemProperty 'test.azure.base_path', azureBasePath + "_repository_test_kit_tests_" + buildParams.testSeed } tasks.register("workloadIdentityJavaRestTest", RestIntegTestTask) { @@ -74,10 +74,10 @@ tasks.register("workloadIdentityJavaRestTest", RestIntegTestTask) { systemProperty 'test.azure.tenant_id', azureTenantId ?: "583d4f71-148a-4163-bad5-2311e13c60dc" systemProperty 'test.azure.client_id', azureClientId ?: "86dd1b33-96c1-4a2e-92ac-b844404fc691" // omitting key and sas_token so that we use a bearer token from workload identity - nonInputProperties.systemProperty 'test.azure.base_path', azureBasePath + "_repository_test_kit_tests_" + BuildParams.testSeed + nonInputProperties.systemProperty 'test.azure.base_path', azureBasePath + "_repository_test_kit_tests_" + buildParams.testSeed } -if (BuildParams.inFipsJvm) { +if (buildParams.inFipsJvm) { // Cannot override the trust store in FIPS mode, and these tasks require a HTTPS fixture tasks.named("managedIdentityJavaRestTest").configure { enabled = false } tasks.named("workloadIdentityJavaRestTest").configure { enabled = false } diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/gcs/build.gradle b/x-pack/plugin/snapshot-repo-test-kit/qa/gcs/build.gradle index 4f0a1c4faf0a..b7e1036ab3e2 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/qa/gcs/build.gradle +++ b/x-pack/plugin/snapshot-repo-test-kit/qa/gcs/build.gradle @@ -36,7 +36,7 @@ if (!gcsServiceAccount && !gcsBucket && !gcsBasePath) { tasks.named("javaRestTest").configure { systemProperty 'test.google.fixture', Boolean.toString(useFixture) systemProperty 'test.gcs.bucket', gcsBucket - nonInputProperties.systemProperty 'test.gcs.base_path', gcsBasePath + "_repository_test_kit_tests" + BuildParams.testSeed + nonInputProperties.systemProperty 'test.gcs.base_path', gcsBasePath + "_repository_test_kit_tests" + buildParams.testSeed if (useFixture == false) { systemProperty 'test.google.account', serviceAccountFile diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/build.gradle b/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/build.gradle index 
3fbb55ca4eb3..14e2b05bc140 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/build.gradle +++ b/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/build.gradle @@ -34,7 +34,7 @@ dependencies { tasks.named("javaRestTest").configure { usesDefaultDistribution() description = "Runs rest tests against an elasticsearch cluster with HDFS." - BuildParams.withFipsEnabledOnly(it) + buildParams.withFipsEnabledOnly(it) // required for krb5kdc-fixture to work jvmArgs '--add-exports', 'java.security.jgss/sun.security.krb5=ALL-UNNAMED' } diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/s3/build.gradle b/x-pack/plugin/snapshot-repo-test-kit/qa/s3/build.gradle index 21cf952f05bf..313a11f8ce43 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/qa/s3/build.gradle +++ b/x-pack/plugin/snapshot-repo-test-kit/qa/s3/build.gradle @@ -45,7 +45,7 @@ tasks.named("javaRestTest").configure { systemProperty("s3AccessKey", s3AccessKey) systemProperty("s3SecretKey", s3SecretKey) nonInputProperties.systemProperty 'test.s3.base_path', - s3BasePath ? s3BasePath + "_repo_test_kit_tests" + BuildParams.testSeed : 'base_path_integration_tests' + s3BasePath ? s3BasePath + "_repo_test_kit_tests" + buildParams.testSeed : 'base_path_integration_tests' } tasks.register("s3ThirdPartyTest") { diff --git a/x-pack/plugin/spatial/build.gradle b/x-pack/plugin/spatial/build.gradle index 5bcec68c227c..4304bae5b999 100644 --- a/x-pack/plugin/spatial/build.gradle +++ b/x-pack/plugin/spatial/build.gradle @@ -28,7 +28,7 @@ testClusters.configureEach { setting 'xpack.security.enabled', 'false' } -if (BuildParams.isSnapshotBuild() == false) { +if (buildParams.isSnapshotBuild() == false) { tasks.named("test").configure { systemProperty 'es.index_mode_feature_flag_registered', 'true' } diff --git a/x-pack/plugin/sql/build.gradle b/x-pack/plugin/sql/build.gradle index 85d778f9ec87..d1dcbc3adbd9 100644 --- a/x-pack/plugin/sql/build.gradle +++ b/x-pack/plugin/sql/build.gradle @@ -137,7 +137,7 @@ allprojects { } } -if (BuildParams.inFipsJvm){ +if (buildParams.inFipsJvm){ // Test clusters run with security disabled tasks.named("internalClusterTest").configure{enabled = false } } diff --git a/x-pack/plugin/sql/jdbc/build.gradle b/x-pack/plugin/sql/jdbc/build.gradle index 138f3e63af46..d1b179f09e40 100644 --- a/x-pack/plugin/sql/jdbc/build.gradle +++ b/x-pack/plugin/sql/jdbc/build.gradle @@ -1,6 +1,6 @@ apply plugin: 'elasticsearch.build' apply plugin: 'elasticsearch.publish' -apply plugin: 'com.github.johnrengelman.shadow' +apply plugin: 'com.gradleup.shadow' description = 'JDBC driver for Elasticsearch' diff --git a/x-pack/plugin/sql/qa/jdbc/build.gradle b/x-pack/plugin/sql/qa/jdbc/build.gradle index 022306fe9b30..a444399ed28c 100644 --- a/x-pack/plugin/sql/qa/jdbc/build.gradle +++ b/x-pack/plugin/sql/qa/jdbc/build.gradle @@ -72,11 +72,11 @@ subprojects { // Configure compatibility testing tasks // Compatibility testing for JDBC driver started with version 7.9.0 - BuildParams.bwcVersions.indexCompatible.findAll({ it.onOrAfter(Version.fromString("7.9.0")) && it != VersionProperties.elasticsearchVersion }).each { bwcVersion -> + buildParams.bwcVersions.indexCompatible.findAll({ it.onOrAfter(Version.fromString("7.9.0")) && it != VersionProperties.elasticsearchVersion }).each { bwcVersion -> def baseName = "v${bwcVersion}" def cluster = testClusters.register(baseName) - UnreleasedVersionInfo unreleasedVersion = BuildParams.bwcVersions.unreleasedInfo(bwcVersion) + UnreleasedVersionInfo unreleasedVersion = 
buildParams.bwcVersions.unreleasedInfo(bwcVersion) Configuration driverConfiguration = configurations.create("jdbcDriver${baseName}") { // TODO: Temporary workaround for https://github.com/elastic/elasticsearch/issues/73433 transitive = false diff --git a/x-pack/plugin/sql/qa/jdbc/security/with-ssl/build.gradle b/x-pack/plugin/sql/qa/jdbc/security/with-ssl/build.gradle index ec88fcffa941..971c7bf31924 100644 --- a/x-pack/plugin/sql/qa/jdbc/security/with-ssl/build.gradle +++ b/x-pack/plugin/sql/qa/jdbc/security/with-ssl/build.gradle @@ -11,5 +11,5 @@ testClusters.configureEach { // JDBC client can only be configured for SSL with keystores, but we can't use JKS/PKCS12 keystores in FIPS 140-2 mode. tasks.withType(Test).configureEach { - BuildParams.withFipsEnabledOnly(it) + buildParams.withFipsEnabledOnly(it) } diff --git a/x-pack/plugin/sql/qa/mixed-node/build.gradle b/x-pack/plugin/sql/qa/mixed-node/build.gradle index 412dec62f81f..06e3b61d5b30 100644 --- a/x-pack/plugin/sql/qa/mixed-node/build.gradle +++ b/x-pack/plugin/sql/qa/mixed-node/build.gradle @@ -19,7 +19,7 @@ testClusters.configureEach { tasks.named("javaRestTest").configure{ enabled = false} // A bug (https://github.com/elastic/elasticsearch/issues/68439) limits us to perform tests with versions from 7.10.3 onwards -BuildParams.bwcVersions.withWireCompatible(v -> v.onOrAfter("7.10.3") && +buildParams.bwcVersions.withWireCompatible(v -> v.onOrAfter("7.10.3") && v != VersionProperties.getElasticsearchVersion()) { bwcVersion, baseName -> def baseCluster = testClusters.register(baseName) { diff --git a/x-pack/plugin/sql/qa/server/security/with-ssl/build.gradle b/x-pack/plugin/sql/qa/server/security/with-ssl/build.gradle index 907d72e606bd..51a3f83a909a 100644 --- a/x-pack/plugin/sql/qa/server/security/with-ssl/build.gradle +++ b/x-pack/plugin/sql/qa/server/security/with-ssl/build.gradle @@ -6,7 +6,7 @@ tasks.named("javaRestTest").configure { // Do not attempt to form a cluster in a FIPS JVM, as doing so with a JKS keystore will fail. // TODO Revisit this when SQL CLI client can handle key/certificate instead of only Keystores. // https://github.com/elastic/elasticsearch/issues/32306 - BuildParams.withFipsEnabledOnly(it) + buildParams.withFipsEnabledOnly(it) } testClusters.matching { it.name == "javaRestTest" }.configureEach { diff --git a/x-pack/plugin/sql/sql-cli/build.gradle b/x-pack/plugin/sql/sql-cli/build.gradle index b9713bcb8e7a..cd24dcc15c86 100644 --- a/x-pack/plugin/sql/sql-cli/build.gradle +++ b/x-pack/plugin/sql/sql-cli/build.gradle @@ -8,7 +8,7 @@ import org.elasticsearch.gradle.internal.info.BuildParams */ apply plugin: 'elasticsearch.build' -apply plugin: 'com.github.johnrengelman.shadow' +apply plugin: 'com.gradleup.shadow' /* We don't use the 'application' plugin because it builds a zip and tgz which * we don't want. 
*/ @@ -55,7 +55,7 @@ tasks.register("runcli") { description = 'Run the CLI and connect to elasticsearch running on 9200' dependsOn "shadowJar" doLast { - List command = ["${BuildParams.runtimeJavaHome}/bin/java"] + List command = ["${buildParams.runtimeJavaHome.get()}/bin/java"] if ('true'.equals(providers.systemProperty('debug').orElse('false').get())) { command += '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=8000' } diff --git a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistry.java b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistry.java index b2dc04c1178e..c89a8237d40b 100644 --- a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistry.java +++ b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistry.java @@ -10,12 +10,10 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.client.internal.Client; -import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.metadata.ComponentTemplate; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentParserConfiguration; @@ -35,7 +33,6 @@ import java.util.Map; import static org.elasticsearch.xpack.stack.StackTemplateRegistry.STACK_TEMPLATES_ENABLED; -import static org.elasticsearch.xpack.stack.StackTemplateRegistry.STACK_TEMPLATES_FEATURE; @Deprecated(since = "8.12.0", forRemoval = true) public class LegacyStackTemplateRegistry extends IndexTemplateRegistry { @@ -48,7 +45,6 @@ public class LegacyStackTemplateRegistry extends IndexTemplateRegistry { public static final String TEMPLATE_VERSION_VARIABLE = "xpack.stack.template.version"; private final ClusterService clusterService; - private final FeatureService featureService; private volatile boolean stackTemplateEnabled; private static final Map ADDITIONAL_TEMPLATE_VARIABLES = Map.of("xpack.stack.template.deprecated", "true"); @@ -95,12 +91,10 @@ public LegacyStackTemplateRegistry( ClusterService clusterService, ThreadPool threadPool, Client client, - NamedXContentRegistry xContentRegistry, - FeatureService featureService + NamedXContentRegistry xContentRegistry ) { super(nodeSettings, clusterService, threadPool, client, xContentRegistry); this.clusterService = clusterService; - this.featureService = featureService; this.stackTemplateEnabled = STACK_TEMPLATES_ENABLED.get(nodeSettings); } @@ -282,12 +276,4 @@ protected boolean requiresMasterNode() { // there and the ActionNotFoundTransportException errors are then prevented. return true; } - - @Override - protected boolean isClusterReady(ClusterChangedEvent event) { - // Ensure current version of the components are installed only once all nodes are updated to 8.9.0. 
- // This is necessary to prevent an error caused by the usage of the ignore_missing_pipeline property - // in the pipeline processor, which has been introduced only in 8.9.0 - return featureService.clusterHasFeature(event.state(), STACK_TEMPLATES_FEATURE); - } } diff --git a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackPlugin.java b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackPlugin.java index 71d01798323d..73c18a3cc261 100644 --- a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackPlugin.java +++ b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackPlugin.java @@ -33,8 +33,7 @@ public Collection createComponents(PluginServices services) { services.clusterService(), services.threadPool(), services.client(), - services.xContentRegistry(), - services.featureService() + services.xContentRegistry() ); legacyStackTemplateRegistry.initialize(); StackTemplateRegistry stackTemplateRegistry = new StackTemplateRegistry( @@ -42,8 +41,7 @@ public Collection createComponents(PluginServices services) { services.clusterService(), services.threadPool(), services.client(), - services.xContentRegistry(), - services.featureService() + services.xContentRegistry() ); stackTemplateRegistry.initialize(); return List.of(legacyStackTemplateRegistry, stackTemplateRegistry); diff --git a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java index ce1b664a4688..aeb9bf2bfa5c 100644 --- a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java +++ b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java @@ -10,14 +10,11 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.client.internal.Client; -import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.metadata.ComponentTemplate; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.features.FeatureService; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentParserConfiguration; @@ -38,13 +35,6 @@ public class StackTemplateRegistry extends IndexTemplateRegistry { private static final Logger logger = LogManager.getLogger(StackTemplateRegistry.class); - // Historical node feature kept here as LegacyStackTemplateRegistry is deprecated - public static final NodeFeature STACK_TEMPLATES_FEATURE = new NodeFeature("stack.templates_supported"); - - // this node feature is a redefinition of {@link DataStreamFeatures#DATA_STREAM_LIFECYCLE} and it's meant to avoid adding a - // dependency to the data-streams module just for this - public static final NodeFeature DATA_STREAM_LIFECYCLE = new NodeFeature("data_stream.lifecycle"); - // The stack template registry version. This number must be incremented when we make changes // to built-in templates.
public static final int REGISTRY_VERSION = 14; @@ -58,7 +48,6 @@ public class StackTemplateRegistry extends IndexTemplateRegistry { ); private final ClusterService clusterService; - private final FeatureService featureService; private final Map componentTemplateConfigs; private volatile boolean stackTemplateEnabled; @@ -121,12 +110,10 @@ public StackTemplateRegistry( ClusterService clusterService, ThreadPool threadPool, Client client, - NamedXContentRegistry xContentRegistry, - FeatureService featureService + NamedXContentRegistry xContentRegistry ) { super(nodeSettings, clusterService, threadPool, client, xContentRegistry); this.clusterService = clusterService; - this.featureService = featureService; this.stackTemplateEnabled = STACK_TEMPLATES_ENABLED.get(nodeSettings); this.componentTemplateConfigs = loadComponentTemplateConfigs(); } @@ -355,11 +342,4 @@ protected boolean requiresMasterNode() { // there and the ActionNotFoundTransportException errors are then prevented. return true; } - - @Override - protected boolean isClusterReady(ClusterChangedEvent event) { - // Ensure current version of the components are installed only after versions that support data stream lifecycle - // due to .kibana-reporting making use of the feature - return featureService.clusterHasFeature(event.state(), DATA_STREAM_LIFECYCLE); - } } diff --git a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplatesFeatures.java b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplatesFeatures.java deleted file mode 100644 index 7b05231fcfd1..000000000000 --- a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplatesFeatures.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.stack; - -import org.elasticsearch.Version; -import org.elasticsearch.features.FeatureSpecification; -import org.elasticsearch.features.NodeFeature; - -import java.util.Map; - -public class StackTemplatesFeatures implements FeatureSpecification { - @Override - public Map getHistoricalFeatures() { - return Map.of(StackTemplateRegistry.STACK_TEMPLATES_FEATURE, Version.V_8_9_0); - } -} diff --git a/x-pack/plugin/stack/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification b/x-pack/plugin/stack/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification deleted file mode 100644 index 30a1498a5472..000000000000 --- a/x-pack/plugin/stack/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification +++ /dev/null @@ -1,8 +0,0 @@ -# -# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -# or more contributor license agreements. Licensed under the Elastic License -# 2.0; you may not use this file except in compliance with the Elastic License -# 2.0. 
-# - -org.elasticsearch.xpack.stack.StackTemplatesFeatures diff --git a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistryTests.java b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistryTests.java index 39f58e638aa6..b8c64f945db0 100644 --- a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistryTests.java +++ b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistryTests.java @@ -12,7 +12,6 @@ import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.ingest.PipelineConfiguration; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.ESTestCase; @@ -25,8 +24,6 @@ import org.junit.After; import org.junit.Before; -import java.util.List; - public class LegacyStackTemplateRegistryTests extends ESTestCase { private LegacyStackTemplateRegistry registry; private ThreadPool threadPool; @@ -36,15 +33,7 @@ public void createRegistryAndClient() { threadPool = new TestThreadPool(this.getClass().getName()); Client client = new NoOpClient(threadPool); ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool); - var featureService = new FeatureService(List.of(new StackTemplatesFeatures())); - registry = new LegacyStackTemplateRegistry( - Settings.EMPTY, - clusterService, - threadPool, - client, - NamedXContentRegistry.EMPTY, - featureService - ); + registry = new LegacyStackTemplateRegistry(Settings.EMPTY, clusterService, threadPool, client, NamedXContentRegistry.EMPTY); } @After diff --git a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackRegistryWithNonRequiredTemplates.java b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackRegistryWithNonRequiredTemplates.java index c1c855867599..7f674e24658d 100644 --- a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackRegistryWithNonRequiredTemplates.java +++ b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackRegistryWithNonRequiredTemplates.java @@ -11,7 +11,6 @@ import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.template.IndexTemplateConfig; @@ -24,10 +23,9 @@ class StackRegistryWithNonRequiredTemplates extends StackTemplateRegistry { ClusterService clusterService, ThreadPool threadPool, Client client, - NamedXContentRegistry xContentRegistry, - FeatureService featureService + NamedXContentRegistry xContentRegistry ) { - super(nodeSettings, clusterService, threadPool, client, xContentRegistry, featureService); + super(nodeSettings, clusterService, threadPool, client, xContentRegistry); } @Override diff --git a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java index 25ff3b5311fa..35e81f6f4c8c 100644 --- a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java +++ 
b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.stack; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; @@ -29,8 +28,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.TriFunction; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.datastreams.DataStreamFeatures; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.ingest.IngestMetadata; import org.elasticsearch.ingest.PipelineConfiguration; import org.elasticsearch.test.ClusterServiceUtils; @@ -71,7 +68,6 @@ import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; @@ -82,22 +78,13 @@ public class StackTemplateRegistryTests extends ESTestCase { private ClusterService clusterService; private ThreadPool threadPool; private VerifyingClient client; - private FeatureService featureService; @Before public void createRegistryAndClient() { threadPool = new TestThreadPool(this.getClass().getName()); client = new VerifyingClient(threadPool); clusterService = ClusterServiceUtils.createClusterService(threadPool); - featureService = new FeatureService(List.of(new StackTemplatesFeatures(), new DataStreamFeatures())); - registry = new StackTemplateRegistry( - Settings.EMPTY, - clusterService, - threadPool, - client, - NamedXContentRegistry.EMPTY, - featureService - ); + registry = new StackTemplateRegistry(Settings.EMPTY, clusterService, threadPool, client, NamedXContentRegistry.EMPTY); } @After @@ -114,8 +101,7 @@ public void testDisabledDoesNotAddIndexTemplates() { clusterService, threadPool, client, - NamedXContentRegistry.EMPTY, - featureService + NamedXContentRegistry.EMPTY ); assertThat(disabledRegistry.getComposableTemplateConfigs(), anEmptyMap()); } @@ -127,8 +113,7 @@ public void testDisabledStillAddsComponentTemplatesAndIlmPolicies() { clusterService, threadPool, client, - NamedXContentRegistry.EMPTY, - featureService + NamedXContentRegistry.EMPTY ); assertThat(disabledRegistry.getComponentTemplateConfigs(), not(anEmptyMap())); assertThat( @@ -371,8 +356,7 @@ public void testMissingNonRequiredTemplates() throws Exception { clusterService, threadPool, client, - NamedXContentRegistry.EMPTY, - featureService + NamedXContentRegistry.EMPTY ); DiscoveryNode node = DiscoveryNodeUtils.create("node"); @@ -519,25 +503,6 @@ public void testThatMissingMasterNodeDoesNothing() { registry.clusterChanged(event); } - public void testThatNothingIsInstalledWhenAllNodesAreNotUpdated() { - DiscoveryNode updatedNode = DiscoveryNodeUtils.create("updatedNode"); - DiscoveryNode outdatedNode = DiscoveryNodeUtils.create("outdatedNode", ESTestCase.buildNewFakeTransportAddress(), Version.V_8_10_0); - DiscoveryNodes nodes = DiscoveryNodes.builder() - .localNodeId("updatedNode") - .masterNodeId("updatedNode") - .add(updatedNode) - .add(outdatedNode) - .build(); - - client.setVerifier((a, r, l) -> { - fail("if some cluster mode are not updated to at least v.8.11.0 nothing should happen"); - return null; - }); - - ClusterChangedEvent event = createClusterChangedEvent(Collections.emptyMap(), nodes); - 
registry.clusterChanged(event); - } - public void testThatTemplatesAreNotDeprecated() { for (ComposableIndexTemplate it : registry.getComposableTemplateConfigs().values()) { assertFalse(it.isDeprecated()); @@ -555,11 +520,6 @@ public void testThatTemplatesAreNotDeprecated() { .forEach(p -> assertFalse((Boolean) p.get("deprecated"))); } - public void testDataStreamLifecycleNodeFeatureId() { - // let's make sure these ids remain in-sync - assertThat(StackTemplateRegistry.DATA_STREAM_LIFECYCLE.id(), is(DataStreamFeatures.DATA_STREAM_LIFECYCLE.id())); - } - // ------------- /** diff --git a/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/build.gradle b/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/build.gradle index b429e123bb63..eb0551a4d10e 100644 --- a/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/build.gradle +++ b/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/build.gradle @@ -12,7 +12,7 @@ dependencies { testImplementation project(':x-pack:qa') } -Version ccsCompatVersion = BuildParams.bwcVersions.minimumWireCompatibleVersion +Version ccsCompatVersion = buildParams.bwcVersions.minimumWireCompatibleVersion restResources { restApi { diff --git a/x-pack/plugin/watcher/qa/rest/build.gradle b/x-pack/plugin/watcher/qa/rest/build.gradle index a911c022212b..8382a7109272 100644 --- a/x-pack/plugin/watcher/qa/rest/build.gradle +++ b/x-pack/plugin/watcher/qa/rest/build.gradle @@ -29,7 +29,7 @@ testClusters.configureEach { setting 'logger.org.elasticsearch.xpack.watcher', 'DEBUG' } -if (BuildParams.inFipsJvm){ +if (buildParams.inFipsJvm){ // Test clusters run with security disabled tasks.named("javaRestTest").configure{enabled = false } tasks.named("yamlRestTest").configure{enabled = false } diff --git a/x-pack/plugin/wildcard/build.gradle b/x-pack/plugin/wildcard/build.gradle index 2bcf0db057aa..b582f3fcea90 100644 --- a/x-pack/plugin/wildcard/build.gradle +++ b/x-pack/plugin/wildcard/build.gradle @@ -20,7 +20,7 @@ dependencies { testImplementation(testArtifact(project(xpackModule('core')))) } -if (BuildParams.isSnapshotBuild() == false) { +if (buildParams.isSnapshotBuild() == false) { tasks.named("test").configure { systemProperty 'es.index_mode_feature_flag_registered', 'true' } diff --git a/x-pack/qa/core-rest-tests-with-security/build.gradle b/x-pack/qa/core-rest-tests-with-security/build.gradle index 0b8e459ed231..8a67a2c1dde0 100644 --- a/x-pack/qa/core-rest-tests-with-security/build.gradle +++ b/x-pack/qa/core-rest-tests-with-security/build.gradle @@ -27,7 +27,7 @@ tasks.named("yamlRestTest").configure { 'index/10_with_id/Index with ID', 'indices.get_alias/10_basic/Get alias against closed indices' ]; - if (BuildParams.isSnapshotBuild() == false) { + if (buildParams.isSnapshotBuild() == false) { blacklist += [ 'synonyms_privileges/10_synonyms_with_privileges/*', 'synonyms_privileges/20_synonyms_no_privileges/*' diff --git a/x-pack/qa/full-cluster-restart/build.gradle b/x-pack/qa/full-cluster-restart/build.gradle index 7248d1b0a6bf..d6b05242f613 100644 --- a/x-pack/qa/full-cluster-restart/build.gradle +++ b/x-pack/qa/full-cluster-restart/build.gradle @@ -15,7 +15,7 @@ dependencies { } -BuildParams.bwcVersions.withIndexCompatible { bwcVersion, baseName -> +buildParams.bwcVersions.withIndexCompatible { bwcVersion, baseName -> tasks.register(bwcTaskName(bwcVersion), StandaloneRestIntegTestTask) { usesBwcDistribution(bwcVersion) systemProperty("tests.old_cluster_version", bwcVersion) diff --git a/x-pack/qa/mixed-tier-cluster/build.gradle 
b/x-pack/qa/mixed-tier-cluster/build.gradle index bf05be45e18a..79e7d6a65599 100644 --- a/x-pack/qa/mixed-tier-cluster/build.gradle +++ b/x-pack/qa/mixed-tier-cluster/build.gradle @@ -10,7 +10,7 @@ dependencies { } // Only run tests for 7.9+, since the node.roles setting was introduced in 7.9.0 -BuildParams.bwcVersions.withWireCompatible(v -> v.onOrAfter("7.9.0") && +buildParams.bwcVersions.withWireCompatible(v -> v.onOrAfter("7.9.0") && v != VersionProperties.getElasticsearchVersion()) { bwcVersion, baseName -> def baseCluster = testClusters.register(baseName) { @@ -54,5 +54,5 @@ tasks.withType(Test).configureEach { classpath = sourceSets.javaRestTest.runtimeClasspath testClassesDirs = sourceSets.javaRestTest.output.classesDirs // Security is explicitly disabled, do not run tests in FIPS mode - BuildParams.withFipsEnabledOnly(it) + buildParams.withFipsEnabledOnly(it) } diff --git a/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/build.gradle b/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/build.gradle index 6d41c4eddf31..9c0648abca21 100644 --- a/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/build.gradle +++ b/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/build.gradle @@ -13,7 +13,7 @@ restResources { } // randomise between sniff and proxy modes -boolean proxyMode = BuildParams.random.nextBoolean() +boolean proxyMode = buildParams.random.nextBoolean() def fulfillingCluster = testClusters.register('fulfilling-cluster') { setting 'xpack.security.enabled', 'true' diff --git a/x-pack/qa/multi-cluster-search-security/legacy-with-full-license/build.gradle b/x-pack/qa/multi-cluster-search-security/legacy-with-full-license/build.gradle index 69c0e8b20c2c..ca79bb7ec382 100644 --- a/x-pack/qa/multi-cluster-search-security/legacy-with-full-license/build.gradle +++ b/x-pack/qa/multi-cluster-search-security/legacy-with-full-license/build.gradle @@ -13,7 +13,7 @@ restResources { } // randomise between sniff and proxy modes -boolean proxyMode = BuildParams.random.nextBoolean() +boolean proxyMode = buildParams.random.nextBoolean() def fulfillingCluster = testClusters.register('fulfilling-cluster') { setting 'xpack.security.enabled', 'true' diff --git a/x-pack/qa/multi-cluster-search-security/legacy-with-restricted-trust/build.gradle b/x-pack/qa/multi-cluster-search-security/legacy-with-restricted-trust/build.gradle index 1164aa240ee2..b9f836976347 100644 --- a/x-pack/qa/multi-cluster-search-security/legacy-with-restricted-trust/build.gradle +++ b/x-pack/qa/multi-cluster-search-security/legacy-with-restricted-trust/build.gradle @@ -23,7 +23,7 @@ tasks.register("copyCerts", Sync) { } // randomise between sniff and proxy modes -boolean proxyMode = BuildParams.random.nextBoolean() +boolean proxyMode = buildParams.random.nextBoolean() def fulfillingCluster = testClusters.register('fulfilling-cluster') { setting 'xpack.security.enabled', 'true' diff --git a/x-pack/qa/oidc-op-tests/build.gradle b/x-pack/qa/oidc-op-tests/build.gradle index 8f46613d5d9f..b53539b22486 100644 --- a/x-pack/qa/oidc-op-tests/build.gradle +++ b/x-pack/qa/oidc-op-tests/build.gradle @@ -11,5 +11,5 @@ dependencies { tasks.named('javaRestTest') { usesDefaultDistribution() // test suite uses jks which is not supported in fips mode - BuildParams.withFipsEnabledOnly(it) + buildParams.withFipsEnabledOnly(it) } diff --git a/x-pack/qa/repository-old-versions/build.gradle b/x-pack/qa/repository-old-versions/build.gradle index 1abf6662a1b8..78cfc0f688e4 100644 --- 
a/x-pack/qa/repository-old-versions/build.gradle +++ b/x-pack/qa/repository-old-versions/build.gradle @@ -98,7 +98,7 @@ if (OS.current() == OS.WINDOWS) { TaskProvider fixture = tasks.register("oldES${versionNoDots}Fixture", AntFixture) { dependsOn project.configurations.oldesFixture, jdks.legacy, config - executable = "${BuildParams.runtimeJavaHome}/bin/java" + executable = "${buildParams.runtimeJavaHome.get()}/bin/java" env 'CLASSPATH', "${-> project.configurations.oldesFixture.asPath}" // old versions of Elasticsearch need JAVA_HOME env 'JAVA_HOME', jdks.legacy.javaHomePath diff --git a/x-pack/qa/rolling-upgrade-basic/build.gradle b/x-pack/qa/rolling-upgrade-basic/build.gradle index a7ea1695c477..09b3b7db7c91 100644 --- a/x-pack/qa/rolling-upgrade-basic/build.gradle +++ b/x-pack/qa/rolling-upgrade-basic/build.gradle @@ -9,7 +9,7 @@ dependencies { testImplementation project(':x-pack:qa') } -BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> +buildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> def baseCluster = testClusters.register(baseName) { testDistribution = "DEFAULT" versions = [bwcVersion.toString(), project.version] @@ -78,5 +78,5 @@ BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> // Security is explicitly disabled, do not run tests in FIPS mode tasks.withType(Test).configureEach { - enabled = BuildParams.inFipsJvm == false -} \ No newline at end of file + enabled = buildParams.inFipsJvm == false +} diff --git a/x-pack/qa/rolling-upgrade-multi-cluster/build.gradle b/x-pack/qa/rolling-upgrade-multi-cluster/build.gradle index 969ba23e1925..0d1cfbd5ff02 100644 --- a/x-pack/qa/rolling-upgrade-multi-cluster/build.gradle +++ b/x-pack/qa/rolling-upgrade-multi-cluster/build.gradle @@ -9,7 +9,7 @@ dependencies { testImplementation project(':x-pack:qa') } -BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> +buildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> def baseLeaderCluster = testClusters.register("${baseName}-leader") { numberOfNodes = 3 @@ -92,5 +92,5 @@ BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> // Security is explicitly disabled, do not run tests in FIPS mode tasks.withType(Test).configureEach { - BuildParams.withFipsEnabledOnly(it) + buildParams.withFipsEnabledOnly(it) } diff --git a/x-pack/qa/rolling-upgrade/build.gradle b/x-pack/qa/rolling-upgrade/build.gradle index 271aadfe4b38..60fb55e9a259 100644 --- a/x-pack/qa/rolling-upgrade/build.gradle +++ b/x-pack/qa/rolling-upgrade/build.gradle @@ -31,7 +31,7 @@ tasks.register("copyTestNodeKeyMaterial", Copy) { into outputDir } -BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> +buildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> String oldVersion = bwcVersion.toString() // SearchableSnapshotsRollingUpgradeIT uses a specific repository to not interfere with other tests diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TransportVersionClusterStateUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TransportVersionClusterStateUpgradeIT.java deleted file mode 100644 index e864a579bd0b..000000000000 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TransportVersionClusterStateUpgradeIT.java +++ /dev/null @@ -1,171 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.upgrades; - -import org.elasticsearch.Build; -import org.elasticsearch.TransportVersion; -import org.elasticsearch.client.Request; -import org.elasticsearch.common.util.Maps; -import org.elasticsearch.test.rest.ObjectPath; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; - -import java.util.Map; - -import static org.elasticsearch.cluster.ClusterState.INFERRED_TRANSPORT_VERSION; -import static org.elasticsearch.cluster.ClusterState.VERSION_INTRODUCING_TRANSPORT_VERSIONS; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.everyItem; -import static org.hamcrest.Matchers.greaterThan; -import static org.hamcrest.Matchers.greaterThanOrEqualTo; -import static org.hamcrest.Matchers.oneOf; - -public class TransportVersionClusterStateUpgradeIT extends AbstractUpgradeTestCase { - - public void testReadsInferredTransportVersions() throws Exception { - // waitUntil because the versions fixup on upgrade happens in the background so may need a retry - assertTrue(waitUntil(() -> { - try { - // check several responses in order to sample from a selection of nodes - for (int i = getClusterHosts().size(); i > 0; i--) { - if (runTransportVersionsTest() == false) { - return false; - } - } - return true; - } catch (Exception e) { - throw new AssertionError(e); - } - })); - } - - private boolean runTransportVersionsTest() throws Exception { - final var clusterState = ObjectPath.createFromResponse( - client().performRequest(new Request("GET", "/_cluster/state" + randomFrom("", "/nodes") + randomFrom("", "?local"))) - ); - final var description = clusterState.toString(); - - final var nodeIds = clusterState.evaluateMapKeys("nodes"); - final Map versionsByNodeId = Maps.newHashMapWithExpectedSize(nodeIds.size()); - for (final var nodeId : nodeIds) { - versionsByNodeId.put(nodeId, clusterState.evaluate("nodes." 
+ nodeId + ".version")); - } - - final var hasTransportVersions = clusterState.evaluate("transport_versions") != null; - final var hasNodesVersions = clusterState.evaluate("nodes_versions") != null; - assertFalse(description, hasNodesVersions && hasTransportVersions); - - switch (CLUSTER_TYPE) { - case OLD -> { - if (clusterHasFeature(RestTestLegacyFeatures.TRANSPORT_VERSION_SUPPORTED) == false) { - // Before 8.8.0 there was only DiscoveryNode#version - assertFalse(description, hasTransportVersions); - assertFalse(description, hasNodesVersions); - } else if (clusterHasFeature(RestTestLegacyFeatures.STATE_REPLACED_TRANSPORT_VERSION_WITH_NODES_VERSION) == false) { - // In [8.8.0, 8.11.0) we exposed just transport_versions - assertTrue(description, hasTransportVersions); - assertFalse(description, hasNodesVersions); - } else { - // From 8.11.0 onwards we exposed nodes_versions - assertFalse(description, hasTransportVersions); - assertTrue(description, hasNodesVersions); - } - } - case MIXED -> { - if (clusterHasFeature(RestTestLegacyFeatures.TRANSPORT_VERSION_SUPPORTED) == false) { - // Responding node might be <8.8.0 (so no extra versions) or >=8.11.0 (includes nodes_versions) - assertFalse(description, hasTransportVersions); - } else if (clusterHasFeature(RestTestLegacyFeatures.STATE_REPLACED_TRANSPORT_VERSION_WITH_NODES_VERSION) == false) { - // Responding node might be in [8.8.0, 8.11.0) (transport_versions) or >=8.11.0 (includes nodes_versions) but not both - assertTrue(description, hasNodesVersions || hasTransportVersions); - } else { - // Responding node is ≥8.11.0 so has nodes_versions for sure - assertFalse(description, hasTransportVersions); - assertTrue(description, hasNodesVersions); - } - } - case UPGRADED -> { - // All nodes are Version.CURRENT, ≥8.11.0, so we definitely have nodes_versions - assertFalse(description, hasTransportVersions); - assertTrue(description, hasNodesVersions); - assertThat(description, versionsByNodeId.values(), everyItem(equalTo(Build.current().version()))); - } - } - - if (hasTransportVersions) { - // Upgrading from [8.8.0, 8.11.0) and the responding node is still on the old version - assertFalse(description, clusterHasFeature(RestTestLegacyFeatures.STATE_REPLACED_TRANSPORT_VERSION_WITH_NODES_VERSION)); - assertTrue(description, clusterHasFeature(RestTestLegacyFeatures.TRANSPORT_VERSION_SUPPORTED)); - assertNotEquals(description, ClusterType.UPGRADED, CLUSTER_TYPE); - - // transport_versions includes the correct version for all nodes, no inference is needed - assertEquals(description, nodeIds.size(), clusterState.evaluateArraySize("transport_versions")); - for (int i = 0; i < nodeIds.size(); i++) { - final var path = "transport_versions." 
+ i; - final String nodeId = clusterState.evaluate(path + ".node_id"); - final var nodeDescription = nodeId + "/" + description; - final var transportVersion = TransportVersion.fromString(clusterState.evaluate(path + ".transport_version")); - final var nodeVersion = versionsByNodeId.get(nodeId); - assertNotNull(nodeDescription, nodeVersion); - if (nodeVersion.equals(Build.current().version())) { - assertEquals(nodeDescription, TransportVersion.current(), transportVersion); - } else { - // There's no relationship between node versions and transport versions anymore, although we can be sure of this: - assertThat(nodeDescription, transportVersion, greaterThanOrEqualTo(INFERRED_TRANSPORT_VERSION)); - } - } - } else if (hasNodesVersions) { - // Either upgrading from ≥8.11.0 (the responding node might be old or new), or from <8.8.0 (the responding node is new) - assertFalse( - description, - clusterHasFeature(RestTestLegacyFeatures.STATE_REPLACED_TRANSPORT_VERSION_WITH_NODES_VERSION) == false - && CLUSTER_TYPE == ClusterType.OLD - ); - - // nodes_versions includes _a_ version for all nodes; it might be correct, or it might be inferred if we're upgrading from - // <8.8.0 and the master is still an old node or the TransportVersionsFixupListener hasn't run yet - assertEquals(description, nodeIds.size(), clusterState.evaluateArraySize("nodes_versions")); - for (int i = 0; i < nodeIds.size(); i++) { - final var path = "nodes_versions." + i; - final String nodeId = clusterState.evaluate(path + ".node_id"); - final var nodeDescription = nodeId + "/" + description; - final var transportVersion = TransportVersion.fromString(clusterState.evaluate(path + ".transport_version")); - final var nodeVersion = versionsByNodeId.get(nodeId); - assertNotNull(nodeDescription, nodeVersion); - if (nodeVersion.equals(Build.current().version())) { - // Either the responding node is upgraded or the upgrade is trivial; if the responding node is upgraded but the master - // is not then its transport version may be temporarily inferred as 8.8.0 until TransportVersionsFixupListener runs. - assertThat( - nodeDescription, - transportVersion, - clusterHasFeature(RestTestLegacyFeatures.TRANSPORT_VERSION_SUPPORTED) - ? equalTo(TransportVersion.current()) - : oneOf(TransportVersion.current(), INFERRED_TRANSPORT_VERSION) - ); - if (CLUSTER_TYPE == ClusterType.UPGRADED && transportVersion.equals(INFERRED_TRANSPORT_VERSION)) { - // TransportVersionsFixupListener should run soon, retry - logger.info("{} - not fixed up yet, retrying", nodeDescription); - return false; - } - } else { - var version = parseLegacyVersion(nodeVersion); - // All non-semantic versions are after 8.8.0 and have transport version - var transportVersionIntroduced = version.map(v -> v.after(VERSION_INTRODUCING_TRANSPORT_VERSIONS)).orElse(true); - if (transportVersionIntroduced) { - // There's no relationship between node versions and transport versions anymore, although we can be sure of this: - assertThat(nodeDescription, transportVersion, greaterThan(INFERRED_TRANSPORT_VERSION)); - } else { - // Responding node is not upgraded, and no later than 8.8.0, so we infer its version correctly. 
- assertEquals(nodeDescription, TransportVersion.fromId(version.get().id()), transportVersion); - } - } - } - - return true; - } -} diff --git a/x-pack/qa/third-party/jira/build.gradle b/x-pack/qa/third-party/jira/build.gradle index b7268af80753..626693a8f295 100644 --- a/x-pack/qa/third-party/jira/build.gradle +++ b/x-pack/qa/third-party/jira/build.gradle @@ -55,7 +55,7 @@ if (!jiraUrl && !jiraUser && !jiraPassword && !jiraProject) { tasks.named("yamlRestTest").configure { finalizedBy "cleanJira" } } -if (BuildParams.inFipsJvm){ +if (buildParams.inFipsJvm){ // Test clusters run with security disabled tasks.named("yamlRestTest").configure{ enabled = false } } diff --git a/x-pack/qa/third-party/pagerduty/build.gradle b/x-pack/qa/third-party/pagerduty/build.gradle index 4b5a0bbeeeb4..86ed67ccbb2d 100644 --- a/x-pack/qa/third-party/pagerduty/build.gradle +++ b/x-pack/qa/third-party/pagerduty/build.gradle @@ -28,7 +28,7 @@ if (!pagerDutyServiceKey) { } } -if (BuildParams.inFipsJvm){ +if (buildParams.inFipsJvm){ // Test clusters run with security disabled tasks.named("yamlRestTest").configure{enabled = false } } diff --git a/x-pack/qa/third-party/slack/build.gradle b/x-pack/qa/third-party/slack/build.gradle index b2b0478da047..ff501a7c99c9 100644 --- a/x-pack/qa/third-party/slack/build.gradle +++ b/x-pack/qa/third-party/slack/build.gradle @@ -28,7 +28,7 @@ if (!slackUrl) { } } -if (BuildParams.inFipsJvm){ +if (buildParams.inFipsJvm){ // Test clusters run with security disabled tasks.named("yamlRestTest").configure{enabled = false } }
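Note on the recurring pattern above: nearly every Gradle script in this diff replaces the static BuildParams accessors with an injected buildParams object. A minimal sketch of the migration, assuming (as the scripts above do) that the Elasticsearch build plugins expose buildParams on the project and that runtimeJavaHome is now a lazy Gradle Provider rather than a plain String; the task names are illustrative, borrowed from the scripts in this diff:

    // Before: static lookups on BuildParams, resolved eagerly at configuration time.
    // executable = "${BuildParams.runtimeJavaHome}/bin/java"
    // if (BuildParams.inFipsJvm) { ... }

    // After: the injected buildParams object. Of the accessors touched in the
    // scripts above, only runtimeJavaHome changes shape: it is now a Provider,
    // hence the explicit .get() at execution time.
    tasks.named("run").configure {
      executable = "${buildParams.runtimeJavaHome.get()}/bin/java"
    }
    if (buildParams.inFipsJvm) {
      // Test clusters run with security disabled, so skip this suite in FIPS mode.
      tasks.named("javaRestTest").configure { enabled = false }
    }

The other accessors used above (bwcVersions, testSeed, random, inFipsJvm, isSnapshotBuild(), withFipsEnabledOnly(it)) keep their shape and only change the receiver from BuildParams to buildParams. On the Java side, the parallel theme is the removal of FeatureService plumbing: SnapshotLifecycleTemplateRegistry, LegacyStackTemplateRegistry, and StackTemplateRegistry lose their FeatureService constructor argument and their isClusterReady(ClusterChangedEvent) overrides, falling back to the IndexTemplateRegistry default now that the historical node features ("stack.templates_supported", "slm-history-managed-by-dsl") and their FeatureSpecification registrations are deleted.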