diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml index 6c8b8edfcbac..4bc72aec2097 100644 --- a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml @@ -56,7 +56,7 @@ steps: timeout_in_minutes: 300 matrix: setup: - BWC_VERSION: ["8.16.2", "8.17.0", "8.18.0", "9.0.0"] + BWC_VERSION: ["8.15.6", "8.16.2", "8.17.0", "8.18.0", "9.0.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 69d11ef1dabb..3d6095d0b9e6 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -448,7 +448,7 @@ steps: setup: ES_RUNTIME_JAVA: - openjdk21 - BWC_VERSION: ["8.16.2", "8.17.0", "8.18.0", "9.0.0"] + BWC_VERSION: ["8.15.6", "8.16.2", "8.17.0", "8.18.0", "9.0.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 @@ -490,7 +490,7 @@ steps: ES_RUNTIME_JAVA: - openjdk21 - openjdk23 - BWC_VERSION: ["8.16.2", "8.17.0", "8.18.0", "9.0.0"] + BWC_VERSION: ["8.15.6", "8.16.2", "8.17.0", "8.18.0", "9.0.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions index 5514fc376a28..f92881da7fea 100644 --- a/.ci/snapshotBwcVersions +++ b/.ci/snapshotBwcVersions @@ -1,4 +1,5 @@ BWC_VERSION: + - "8.15.6" - "8.16.2" - "8.17.0" - "8.18.0" diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPluginFuncTest.groovy index 6d080e1c8076..bb100b6b2388 100644 --- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPluginFuncTest.groovy +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPluginFuncTest.groovy @@ -9,9 +9,10 @@ package org.elasticsearch.gradle.internal +import spock.lang.Unroll + import org.elasticsearch.gradle.fixtures.AbstractGitAwareGradleFuncTest import org.gradle.testkit.runner.TaskOutcome -import spock.lang.Unroll class InternalDistributionBwcSetupPluginFuncTest extends AbstractGitAwareGradleFuncTest { @@ -23,8 +24,10 @@ class InternalDistributionBwcSetupPluginFuncTest extends AbstractGitAwareGradleF apply plugin: 'elasticsearch.internal-distribution-bwc-setup' """ execute("git branch origin/8.x", file("cloned")) + execute("git branch origin/8.3", file("cloned")) + execute("git branch origin/8.2", file("cloned")) + execute("git branch origin/8.1", file("cloned")) execute("git branch origin/7.16", file("cloned")) - execute("git branch origin/7.15", file("cloned")) } def "builds distribution from branches via archives extractedAssemble"() { @@ -48,10 +51,11 @@ class InternalDistributionBwcSetupPluginFuncTest extends AbstractGitAwareGradleF assertOutputContains(result.output, "[$bwcDistVersion] > Task :distribution:archives:darwin-tar:${expectedAssembleTaskName}") where: - bwcDistVersion | bwcProject | expectedAssembleTaskName - "8.0.0" | "minor" | "extractedAssemble" - "7.16.0" | "staged" | "extractedAssemble" - "7.15.2" | "bugfix" | "extractedAssemble" + bwcDistVersion | bwcProject | expectedAssembleTaskName + "8.4.0" | "minor" | "extractedAssemble" + "8.3.0" | "staged" | "extractedAssemble" + "8.2.1" | "bugfix" | "extractedAssemble" + "8.1.3" | "bugfix2" | "extractedAssemble" } @Unroll @@ -70,8 +74,8 @@ class InternalDistributionBwcSetupPluginFuncTest extends 
AbstractGitAwareGradleF where: bwcDistVersion | platform - "8.0.0" | "darwin" - "8.0.0" | "linux" + "8.4.0" | "darwin" + "8.4.0" | "linux" } def "bwc expanded distribution folder can be resolved as bwc project artifact"() { @@ -107,11 +111,11 @@ class InternalDistributionBwcSetupPluginFuncTest extends AbstractGitAwareGradleF result.task(":resolveExpandedDistribution").outcome == TaskOutcome.SUCCESS result.task(":distribution:bwc:minor:buildBwcDarwinTar").outcome == TaskOutcome.SUCCESS and: "assemble task triggered" - result.output.contains("[8.0.0] > Task :distribution:archives:darwin-tar:extractedAssemble") + result.output.contains("[8.4.0] > Task :distribution:archives:darwin-tar:extractedAssemble") result.output.contains("expandedRootPath /distribution/bwc/minor/build/bwc/checkout-8.x/" + "distribution/archives/darwin-tar/build/install") result.output.contains("nested folder /distribution/bwc/minor/build/bwc/checkout-8.x/" + - "distribution/archives/darwin-tar/build/install/elasticsearch-8.0.0-SNAPSHOT") + "distribution/archives/darwin-tar/build/install/elasticsearch-8.4.0-SNAPSHOT") } } diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionDownloadPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionDownloadPluginFuncTest.groovy index eb6185e5aed5..fc5d432a9ef9 100644 --- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionDownloadPluginFuncTest.groovy +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionDownloadPluginFuncTest.groovy @@ -57,7 +57,7 @@ class InternalDistributionDownloadPluginFuncTest extends AbstractGradleFuncTest elasticsearch_distributions { test_distro { - version = "8.0.0" + version = "8.4.0" type = "archive" platform = "linux" architecture = Architecture.current(); @@ -87,7 +87,7 @@ class InternalDistributionDownloadPluginFuncTest extends AbstractGradleFuncTest elasticsearch_distributions { test_distro { - version = "8.0.0" + version = "8.4.0" type = "archive" platform = "linux" architecture = Architecture.current(); diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/test/rest/LegacyYamlRestCompatTestPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/test/rest/LegacyYamlRestCompatTestPluginFuncTest.groovy index e3efe3d7ffbf..15b057a05e03 100644 --- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/test/rest/LegacyYamlRestCompatTestPluginFuncTest.groovy +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/test/rest/LegacyYamlRestCompatTestPluginFuncTest.groovy @@ -40,7 +40,7 @@ class LegacyYamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTe given: internalBuild() - subProject(":distribution:bwc:staged") << """ + subProject(":distribution:bwc:minor") << """ configurations { checkout } artifacts { checkout(new File(projectDir, "checkoutDir")) @@ -61,11 +61,11 @@ class LegacyYamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTe result.task(transformTask).outcome == TaskOutcome.NO_SOURCE } - def "yamlRestCompatTest executes and copies api and transforms tests from :bwc:staged"() { + def "yamlRestCompatTest executes and copies api and transforms tests from :bwc:minor"() { given: internalBuild() - subProject(":distribution:bwc:staged") << """ + subProject(":distribution:bwc:minor") << 
""" configurations { checkout } artifacts { checkout(new File(projectDir, "checkoutDir")) @@ -98,8 +98,8 @@ class LegacyYamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTe String api = "foo.json" String test = "10_basic.yml" //add the compatible test and api files, these are the prior version's normal yaml rest tests - file("distribution/bwc/staged/checkoutDir/rest-api-spec/src/main/resources/rest-api-spec/api/" + api) << "" - file("distribution/bwc/staged/checkoutDir/src/yamlRestTest/resources/rest-api-spec/test/" + test) << "" + file("distribution/bwc/minor/checkoutDir/rest-api-spec/src/main/resources/rest-api-spec/api/" + api) << "" + file("distribution/bwc/minor/checkoutDir/src/yamlRestTest/resources/rest-api-spec/test/" + test) << "" when: def result = gradleRunner("yamlRestCompatTest").build() @@ -145,7 +145,7 @@ class LegacyYamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTe given: internalBuild() withVersionCatalogue() - subProject(":distribution:bwc:staged") << """ + subProject(":distribution:bwc:minor") << """ configurations { checkout } artifacts { checkout(new File(projectDir, "checkoutDir")) @@ -186,7 +186,7 @@ class LegacyYamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTe given: internalBuild() - subProject(":distribution:bwc:staged") << """ + subProject(":distribution:bwc:minor") << """ configurations { checkout } artifacts { checkout(new File(projectDir, "checkoutDir")) @@ -230,7 +230,7 @@ class LegacyYamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTe setupRestResources([], []) - file("distribution/bwc/staged/checkoutDir/src/yamlRestTest/resources/rest-api-spec/test/test.yml" ) << """ + file("distribution/bwc/minor/checkoutDir/src/yamlRestTest/resources/rest-api-spec/test/test.yml" ) << """ "one": - do: do_.some.key_to_replace: diff --git a/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/distribution/bwc/bugfix2/build.gradle b/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/distribution/bwc/bugfix2/build.gradle new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/distribution/bwc/maintenance/build.gradle b/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/distribution/bwc/maintenance/build.gradle new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/settings.gradle b/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/settings.gradle index 8c321294b585..e931537fcd6e 100644 --- a/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/settings.gradle +++ b/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/settings.gradle @@ -10,9 +10,11 @@ rootProject.name = "root" include ":distribution:bwc:bugfix" +include ":distribution:bwc:bugfix2" include ":distribution:bwc:minor" include ":distribution:bwc:major" include ":distribution:bwc:staged" +include ":distribution:bwc:maintenance" include ":distribution:archives:darwin-tar" include ":distribution:archives:oss-darwin-tar" include ":distribution:archives:darwin-aarch64-tar" diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcVersions.java 
b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcVersions.java index 93c2623a23d3..37b28389ad97 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcVersions.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcVersions.java @@ -21,14 +21,15 @@ import java.util.Optional; import java.util.Set; import java.util.TreeMap; -import java.util.TreeSet; import java.util.function.BiConsumer; import java.util.function.Consumer; import java.util.function.Predicate; import java.util.regex.Matcher; import java.util.regex.Pattern; +import static java.util.Collections.reverseOrder; import static java.util.Collections.unmodifiableList; +import static java.util.Comparator.comparing; /** * A container for elasticsearch supported version information used in BWC testing. @@ -73,11 +74,11 @@ public class BwcVersions implements Serializable { private final transient List<Version> versions; private final Map<Version, UnreleasedVersionInfo> unreleased; - public BwcVersions(List<String> versionLines) { - this(versionLines, Version.fromString(VersionProperties.getElasticsearch())); + public BwcVersions(List<String> versionLines, List<String> developmentBranches) { + this(versionLines, Version.fromString(VersionProperties.getElasticsearch()), developmentBranches); } - public BwcVersions(Version currentVersionProperty, List<Version> allVersions) { + public BwcVersions(Version currentVersionProperty, List<Version> allVersions, List<String> developmentBranches) { if (allVersions.isEmpty()) { throw new IllegalArgumentException("Could not parse any versions"); } @@ -86,12 +87,12 @@ public BwcVersions(Version currentVersionProperty, List<Version> allVersions) { this.currentVersion = allVersions.get(allVersions.size() - 1); assertCurrentVersionMatchesParsed(currentVersionProperty); - this.unreleased = computeUnreleased(); + this.unreleased = computeUnreleased(developmentBranches); } // Visible for testing - BwcVersions(List<String> versionLines, Version currentVersionProperty) { - this(currentVersionProperty, parseVersionLines(versionLines)); + BwcVersions(List<String> versionLines, Version currentVersionProperty, List<String> developmentBranches) { + this(currentVersionProperty, parseVersionLines(versionLines), developmentBranches); } private static List<Version> parseVersionLines(List<String> versionLines) { @@ -126,58 +127,77 @@ public void forPreviousUnreleased(Consumer<UnreleasedVersionInfo> consumer) { getUnreleased().stream().filter(version -> version.equals(currentVersion) == false).map(unreleased::get).forEach(consumer); } - private String getBranchFor(Version version) { - if (version.equals(currentVersion)) { - // Just assume the current branch is 'main'. It's actually not important, we never check out the current branch. - return "main"; - } else { + private String getBranchFor(Version version, List<String> developmentBranches) { + // If the current version matches a specific feature freeze branch, use that + if (developmentBranches.contains(version.getMajor() + "." + version.getMinor())) { return version.getMajor() + "." + version.getMinor(); + } else if (developmentBranches.contains(version.getMajor() + ".x")) { // Otherwise if an n.x branch exists and we are that major + return version.getMajor() + ".x"; + } else { // otherwise we're the main branch + return "main"; + } } - private Map<Version, UnreleasedVersionInfo> computeUnreleased() { - Set<Version> unreleased = new TreeSet<>(); - // The current version is being worked, is always unreleased - unreleased.add(currentVersion); - // Recurse for all unreleased versions starting from the current version - addUnreleased(unreleased, currentVersion, 0); + private Map<Version, UnreleasedVersionInfo> computeUnreleased(List<String> developmentBranches) { + Map<Version, UnreleasedVersionInfo> result = new TreeMap<>(); - // Grab the latest version from the previous major if necessary as well, this is going to be a maintenance release - Version maintenance = versions.stream() - .filter(v -> v.getMajor() == currentVersion.getMajor() - 1) - .max(Comparator.naturalOrder()) - .orElseThrow(); - // This is considered the maintenance release only if we haven't yet encountered it - boolean hasMaintenanceRelease = unreleased.add(maintenance); + // The current version is always in development + String currentBranch = getBranchFor(currentVersion, developmentBranches); + result.put(currentVersion, new UnreleasedVersionInfo(currentVersion, currentBranch, ":distribution")); + + // Check for an n.x branch as well + if (currentBranch.equals("main") && developmentBranches.stream().anyMatch(s -> s.endsWith(".x"))) { + // This should correspond to the latest new minor + Version version = versions.stream() + .sorted(Comparator.reverseOrder()) + .filter(v -> v.getMajor() == (currentVersion.getMajor() - 1) && v.getRevision() == 0) + .findFirst() + .orElseThrow(() -> new IllegalStateException("Unable to determine development version for branch")); + String branch = getBranchFor(version, developmentBranches); + assert branch.equals(currentVersion.getMajor() - 1 + ".x") : "Expected branch does not match development branch"; + + result.put(version, new UnreleasedVersionInfo(version, branch, ":distribution:bwc:minor")); + } - List<Version> unreleasedList = unreleased.stream().sorted(Comparator.reverseOrder()).toList(); - Map<Version, UnreleasedVersionInfo> result = new TreeMap<>(); - boolean newMinor = false; - for (int i = 0; i < unreleasedList.size(); i++) { - Version esVersion = unreleasedList.get(i); - // This is either a new minor or staged release - if (currentVersion.equals(esVersion)) { - result.put(esVersion, new UnreleasedVersionInfo(esVersion, getBranchFor(esVersion), ":distribution")); - } else if (esVersion.getRevision() == 0) { - // If there are two upcoming unreleased minors then this one is the new minor - if (newMinor == false && unreleasedList.get(i + 1).getRevision() == 0) { - result.put(esVersion, new UnreleasedVersionInfo(esVersion, esVersion.getMajor() + ".x", ":distribution:bwc:minor")); - newMinor = true; - } else if (newMinor == false - && unreleasedList.stream().filter(v -> v.getMajor() == esVersion.getMajor() && v.getRevision() == 0).count() == 1) { - // This is the only unreleased new minor which means we've not yet staged it for release - result.put(esVersion, new UnreleasedVersionInfo(esVersion, esVersion.getMajor() + ".x", ":distribution:bwc:minor")); - newMinor = true; - } else { - result.put(esVersion, new UnreleasedVersionInfo(esVersion, getBranchFor(esVersion), ":distribution:bwc:staged")); - } - } else { - // If this is the oldest unreleased version and we have a maintenance release - if (i == unreleasedList.size() - 1 && hasMaintenanceRelease) { - result.put(esVersion, new UnreleasedVersionInfo(esVersion, getBranchFor(esVersion), ":distribution:bwc:maintenance")); - } else { - result.put(esVersion, new UnreleasedVersionInfo(esVersion, getBranchFor(esVersion), ":distribution:bwc:bugfix")); - } + // Now handle all the feature freeze branches + List<String> featureFreezeBranches = developmentBranches.stream() + .filter(b -> Pattern.matches("[0-9]+\\.[0-9]+", b)) + .sorted(reverseOrder(comparing(s -> Version.fromString(s, Version.Mode.RELAXED)))) + .toList(); + + boolean existingBugfix = false; + for (int i = 0; i < featureFreezeBranches.size(); i++) { + String branch = featureFreezeBranches.get(i); + Version version = versions.stream() + .sorted(Comparator.reverseOrder()) + .filter(v -> v.toString().startsWith(branch)) + .findFirst() + .orElse(null); + + // If we don't know about this version we can ignore it + if (version == null) { + continue; + } + + // If this is the current version we can ignore as we've already handled it + if (version.equals(currentVersion)) { + continue; + } + + // We only maintain compatibility back one major so ignore anything older + if (currentVersion.getMajor() - version.getMajor() > 1) { + continue; + } + + // This is the maintenance version + if (i == featureFreezeBranches.size() - 1) { + result.put(version, new UnreleasedVersionInfo(version, branch, ":distribution:bwc:maintenance")); + } else if (version.getRevision() == 0) { // This is the next staged minor + result.put(version, new UnreleasedVersionInfo(version, branch, ":distribution:bwc:staged")); + } else { // This is a bugfix + String project = existingBugfix ? "bugfix2" : "bugfix"; + result.put(version, new UnreleasedVersionInfo(version, branch, ":distribution:bwc:" + project)); + existingBugfix = true; } } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java index 0535026b2594..27d2a66feb20 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java @@ -8,6 +8,9 @@ */ package org.elasticsearch.gradle.internal.info; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; + import org.apache.commons.io.IOUtils; import org.elasticsearch.gradle.VersionProperties; import org.elasticsearch.gradle.internal.BwcVersions; @@ -44,11 +47,13 @@ import java.io.File; import java.io.FileInputStream; import java.io.IOException; +import java.io.InputStream; import java.io.InputStreamReader; import java.io.UncheckedIOException; import java.nio.file.Files; import java.time.ZoneOffset; import java.time.ZonedDateTime; +import java.util.ArrayList; import java.util.List; import java.util.Locale; import java.util.Random; @@ -68,6 +73,7 @@ public class GlobalBuildInfoPlugin implements Plugin<Project> { private final JavaInstallationRegistry javaInstallationRegistry; private final JvmMetadataDetector metadataDetector; private final ProviderFactory providers; + private final ObjectMapper objectMapper; private JavaToolchainService toolChainService; private Project project; @@ -82,7 +88,7 @@ public GlobalBuildInfoPlugin( this.javaInstallationRegistry = javaInstallationRegistry; this.metadataDetector = new ErrorTraceMetadataDetector(metadataDetector); this.providers = providers; - + this.objectMapper = new ObjectMapper(); } @Override @@ -190,12 +196,27 @@ private BwcVersions resolveBwcVersions() { ); try (var is = new FileInputStream(versionsFilePath)) { List<String> versionLines = IOUtils.readLines(is, "UTF-8"); - return new BwcVersions(versionLines); + return new BwcVersions(versionLines, getDevelopmentBranches()); } catch (IOException e) { throw new IllegalStateException("Unable to resolve to resolve bwc versions from versionsFile.", e); } } + private List<String> getDevelopmentBranches() { + List<String> branches = new ArrayList<>(); + File branchesFile = new File(Util.locateElasticsearchWorkspace(project.getGradle()), "branches.json"); + try (InputStream is = new FileInputStream(branchesFile)) { + JsonNode json = objectMapper.readTree(is); + for (JsonNode node : json.get("branches")) { + branches.add(node.get("branch").asText()); + } + } catch (IOException e) { + throw new UncheckedIOException(e); + } + + return branches; + } + private void logGlobalBuildInfo(BuildParameterExtension buildParams) { final String osName = System.getProperty("os.name"); final String osVersion = System.getProperty("os.version"); diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/BwcVersionsSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/BwcVersionsSpec.groovy index 9c7d20d84a67..4d033564a42b 100644 --- a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/BwcVersionsSpec.groovy +++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/BwcVersionsSpec.groovy @@ -17,8 +17,9 @@ import org.elasticsearch.gradle.internal.BwcVersions.UnreleasedVersionInfo class BwcVersionsSpec extends Specification { List<String> versionLines = [] - def "current version is next minor with next major and last minor both staged"() { + def "current version is next major"() { given: + addVersion('7.17.10', '8.9.0') addVersion('8.14.0', '9.9.0') addVersion('8.14.1', '9.9.0') addVersion('8.14.2', '9.9.0') addVersion('8.15.0', '9.9.0') addVersion('8.15.1', '9.9.0') addVersion('8.15.2', '9.9.0') addVersion('8.16.0', '9.10.0') addVersion('8.16.1', '9.10.0') addVersion('8.17.0', '9.10.0') addVersion('9.0.0', '10.0.0') - addVersion('9.1.0', '10.1.0') when: - def bwc = new BwcVersions(versionLines, v('9.1.0')) + def bwc = new BwcVersions(versionLines, v('9.0.0'), ['main', '8.x', '8.16', '8.15', '7.17']) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ + (v('8.15.2')): new UnreleasedVersionInfo(v('8.15.2'), '8.15', ':distribution:bwc:bugfix2'), (v('8.16.1')): new UnreleasedVersionInfo(v('8.16.1'), '8.16', ':distribution:bwc:bugfix'), - (v('8.17.0')): new UnreleasedVersionInfo(v('8.17.0'), '8.17', ':distribution:bwc:staged'), - (v('9.0.0')): new UnreleasedVersionInfo(v('9.0.0'), '9.x', ':distribution:bwc:minor'), - (v('9.1.0')): new UnreleasedVersionInfo(v('9.1.0'), 'main', ':distribution') + (v('8.17.0')): new UnreleasedVersionInfo(v('8.17.0'), '8.x', ':distribution:bwc:minor'), + (v('9.0.0')): new UnreleasedVersionInfo(v('9.0.0'), 'main', ':distribution'), ] - bwc.wireCompatible == [v('8.17.0'), v('9.0.0'), v('9.1.0')] - bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('9.0.0'), v('9.1.0')] + bwc.wireCompatible == [v('8.17.0'), v('9.0.0')] + bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('9.0.0')] } - def "current is next minor with upcoming minor staged"() { + def "current version is next major with staged minor"() { given: + addVersion('7.17.10',
'8.9.0') addVersion('8.14.0', '9.9.0') addVersion('8.14.1', '9.9.0') addVersion('8.14.2', '9.9.0') @@ -57,53 +58,106 @@ class BwcVersionsSpec extends Specification { addVersion('8.16.0', '9.10.0') addVersion('8.16.1', '9.10.0') addVersion('8.17.0', '9.10.0') - addVersion('8.17.1', '9.10.0') + addVersion('8.18.0', '9.10.0') addVersion('9.0.0', '10.0.0') - addVersion('9.1.0', '10.1.0') when: - def bwc = new BwcVersions(versionLines, v('9.1.0')) + def bwc = new BwcVersions(versionLines, v('9.0.0'), ['main', '8.x', '8.17', '8.16', '8.15', '7.17']) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('8.17.1')): new UnreleasedVersionInfo(v('8.17.1'), '8.17', ':distribution:bwc:bugfix'), + (v('8.15.2')): new UnreleasedVersionInfo(v('8.15.2'), '8.15', ':distribution:bwc:bugfix2'), + (v('8.16.1')): new UnreleasedVersionInfo(v('8.16.1'), '8.16', ':distribution:bwc:bugfix'), + (v('8.17.0')): new UnreleasedVersionInfo(v('8.17.0'), '8.17', ':distribution:bwc:staged'), + (v('8.18.0')): new UnreleasedVersionInfo(v('8.18.0'), '8.x', ':distribution:bwc:minor'), + (v('9.0.0')): new UnreleasedVersionInfo(v('9.0.0'), 'main', ':distribution'), + ] + bwc.wireCompatible == [v('8.18.0'), v('9.0.0')] + bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.18.0'), v('9.0.0')] + } + + def "current version is first new minor in major series"() { + given: + addVersion('7.17.10', '8.9.0') + addVersion('8.16.0', '9.10.0') + addVersion('8.16.1', '9.10.0') + addVersion('8.17.0', '9.10.0') + addVersion('8.18.0', '9.10.0') + addVersion('9.0.0', '10.0.0') + addVersion('9.1.0', '10.0.0') + + when: + def bwc = new BwcVersions(versionLines, v('9.1.0'), ['main', '9.0', '8.18']) + def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } + + then: + unreleased == [ + (v('8.18.0')): new UnreleasedVersionInfo(v('8.18.0'), '8.18', ':distribution:bwc:maintenance'), (v('9.0.0')): new UnreleasedVersionInfo(v('9.0.0'), '9.0', ':distribution:bwc:staged'), - (v('9.1.0')): new UnreleasedVersionInfo(v('9.1.0'), 'main', ':distribution') + (v('9.1.0')): new UnreleasedVersionInfo(v('9.1.0'), 'main', ':distribution'), ] - bwc.wireCompatible == [v('8.17.0'), v('8.17.1'), v('9.0.0'), v('9.1.0')] - bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.17.1'), v('9.0.0'), v('9.1.0')] + bwc.wireCompatible == [v('8.18.0'), v('9.0.0'), v('9.1.0')] + bwc.indexCompatible == [v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.18.0'), v('9.0.0'), v('9.1.0')] } - def "current version is staged major"() { + def "current version is new minor with single bugfix"() { given: - addVersion('8.14.0', '9.9.0') - addVersion('8.14.1', '9.9.0') - addVersion('8.14.2', '9.9.0') - addVersion('8.15.0', '9.9.0') - addVersion('8.15.1', '9.9.0') - addVersion('8.15.2', '9.9.0') + addVersion('7.17.10', '8.9.0') addVersion('8.16.0', '9.10.0') addVersion('8.16.1', '9.10.0') addVersion('8.17.0', '9.10.0') - addVersion('8.17.1', '9.10.0') + addVersion('8.18.0', '9.10.0') addVersion('9.0.0', '10.0.0') + addVersion('9.0.1', '10.0.0') + addVersion('9.1.0', '10.0.0') when: - def bwc = new BwcVersions(versionLines, v('9.0.0')) + def bwc = new BwcVersions(versionLines, v('9.1.0'), ['main', '9.0', '8.18']) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('8.17.1')): new 
UnreleasedVersionInfo(v('8.17.1'), '8.17', ':distribution:bwc:bugfix'), - (v('9.0.0')): new UnreleasedVersionInfo(v('9.0.0'), 'main', ':distribution'), + (v('8.18.0')): new UnreleasedVersionInfo(v('8.18.0'), '8.18', ':distribution:bwc:maintenance'), + (v('9.0.1')): new UnreleasedVersionInfo(v('9.0.1'), '9.0', ':distribution:bwc:bugfix'), + (v('9.1.0')): new UnreleasedVersionInfo(v('9.1.0'), 'main', ':distribution'), ] - bwc.wireCompatible == [v('8.17.0'), v('8.17.1'), v('9.0.0')] - bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.17.1'), v('9.0.0')] + bwc.wireCompatible == [v('8.18.0'), v('9.0.0'), v('9.0.1'), v('9.1.0')] + bwc.indexCompatible == [v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.18.0'), v('9.0.0'), v('9.0.1'), v('9.1.0')] } - def "current version is major with unreleased next minor"() { + def "current version is new minor with single bugfix and staged minor"() { given: + addVersion('7.17.10', '8.9.0') + addVersion('8.16.0', '9.10.0') + addVersion('8.16.1', '9.10.0') + addVersion('8.17.0', '9.10.0') + addVersion('8.18.0', '9.10.0') + addVersion('9.0.0', '10.0.0') + addVersion('9.0.1', '10.0.0') + addVersion('9.1.0', '10.0.0') + addVersion('9.2.0', '10.0.0') + + when: + def bwc = new BwcVersions(versionLines, v('9.2.0'), ['main', '9.1', '9.0', '8.18']) + def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } + + then: + unreleased == [ + (v('8.18.0')): new UnreleasedVersionInfo(v('8.18.0'), '8.18', ':distribution:bwc:maintenance'), + (v('9.0.1')): new UnreleasedVersionInfo(v('9.0.1'), '9.0', ':distribution:bwc:bugfix'), + (v('9.1.0')): new UnreleasedVersionInfo(v('9.1.0'), '9.1', ':distribution:bwc:staged'), + (v('9.2.0')): new UnreleasedVersionInfo(v('9.2.0'), 'main', ':distribution'), + ] + bwc.wireCompatible == [v('8.18.0'), v('9.0.0'), v('9.0.1'), v('9.1.0'), v('9.2.0')] + bwc.indexCompatible == [v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.18.0'), v('9.0.0'), v('9.0.1'), v('9.1.0'), v('9.2.0')] + } + + def "current version is next minor"() { + given: + addVersion('7.16.3', '8.9.0') + addVersion('7.17.0', '8.9.0') + addVersion('7.17.1', '8.9.0') addVersion('8.14.0', '9.9.0') addVersion('8.14.1', '9.9.0') addVersion('8.14.2', '9.9.0') @@ -113,24 +167,29 @@ class BwcVersionsSpec extends Specification { addVersion('8.16.0', '9.10.0') addVersion('8.16.1', '9.10.0') addVersion('8.17.0', '9.10.0') - addVersion('9.0.0', '10.0.0') + addVersion('8.17.1', '9.10.0') + addVersion('8.18.0', '9.10.0') when: - def bwc = new BwcVersions(versionLines, v('9.0.0')) + def bwc = new BwcVersions(versionLines, v('8.18.0'), ['main', '8.x', '8.17', '8.16', '7.17']) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('8.16.1')): new UnreleasedVersionInfo(v('8.16.1'), '8.16', ':distribution:bwc:bugfix'), - (v('8.17.0')): new UnreleasedVersionInfo(v('8.17.0'), '8.x', ':distribution:bwc:minor'), - (v('9.0.0')): new UnreleasedVersionInfo(v('9.0.0'), 'main', ':distribution'), + (v('7.17.1')): new UnreleasedVersionInfo(v('7.17.1'), '7.17', ':distribution:bwc:maintenance'), + (v('8.16.1')): new UnreleasedVersionInfo(v('8.16.1'), '8.16', ':distribution:bwc:bugfix2'), + (v('8.17.1')): new UnreleasedVersionInfo(v('8.17.1'), '8.17', ':distribution:bwc:bugfix'), + (v('8.18.0')): new UnreleasedVersionInfo(v('8.18.0'), '8.x', ':distribution'), ] - bwc.wireCompatible == [v('8.17.0'), v('9.0.0')] - bwc.indexCompatible == [v('8.14.0'), 
v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('9.0.0')] + bwc.wireCompatible == [v('7.17.0'), v('7.17.1'), v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.17.1'), v('8.18.0')] + bwc.indexCompatible == [v('7.16.3'), v('7.17.0'), v('7.17.1'), v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.17.1'), v('8.18.0')] } - def "current version is major with staged next minor"() { + def "current version is new minor with staged minor"() { given: + addVersion('7.16.3', '8.9.0') + addVersion('7.17.0', '8.9.0') + addVersion('7.17.1', '8.9.0') addVersion('8.14.0', '9.9.0') addVersion('8.14.1', '9.9.0') addVersion('8.14.2', '9.9.0') @@ -138,26 +197,31 @@ class BwcVersionsSpec extends Specification { addVersion('8.15.1', '9.9.0') addVersion('8.15.2', '9.9.0') addVersion('8.16.0', '9.10.0') + addVersion('8.16.1', '9.10.0') addVersion('8.17.0', '9.10.0') - addVersion('9.0.0', '10.0.0') + addVersion('8.18.0', '9.10.0') when: - def bwc = new BwcVersions(versionLines, v('9.0.0')) + def bwc = new BwcVersions(versionLines, v('8.18.0'), ['main', '8.x', '8.17', '8.16', '8.15', '7.17']) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('8.15.2')): new UnreleasedVersionInfo(v('8.15.2'), '8.15', ':distribution:bwc:bugfix'), - (v('8.16.0')): new UnreleasedVersionInfo(v('8.16.0'), '8.16', ':distribution:bwc:staged'), - (v('8.17.0')): new UnreleasedVersionInfo(v('8.17.0'), '8.x', ':distribution:bwc:minor'), - (v('9.0.0')): new UnreleasedVersionInfo(v('9.0.0'), 'main', ':distribution'), + (v('7.17.1')): new UnreleasedVersionInfo(v('7.17.1'), '7.17', ':distribution:bwc:maintenance'), + (v('8.15.2')): new UnreleasedVersionInfo(v('8.15.2'), '8.15', ':distribution:bwc:bugfix2'), + (v('8.16.1')): new UnreleasedVersionInfo(v('8.16.1'), '8.16', ':distribution:bwc:bugfix'), + (v('8.17.0')): new UnreleasedVersionInfo(v('8.17.0'), '8.17', ':distribution:bwc:staged'), + (v('8.18.0')): new UnreleasedVersionInfo(v('8.18.0'), '8.x', ':distribution'), ] - bwc.wireCompatible == [v('8.17.0'), v('9.0.0')] - bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.17.0'), v('9.0.0')] + bwc.wireCompatible == [v('7.17.0'), v('7.17.1'), v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.18.0')] + bwc.indexCompatible == [v('7.16.3'), v('7.17.0'), v('7.17.1'), v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.18.0')] } - def "current version is next bugfix"() { + def "current version is first bugfix"() { given: + addVersion('7.16.3', '8.9.0') + addVersion('7.17.0', '8.9.0') + addVersion('7.17.1', '8.9.0') addVersion('8.14.0', '9.9.0') addVersion('8.14.1', '9.9.0') addVersion('8.14.2', '9.9.0') @@ -166,52 +230,44 @@ class BwcVersionsSpec extends Specification { addVersion('8.15.2', '9.9.0') addVersion('8.16.0', '9.10.0') addVersion('8.16.1', '9.10.0') - addVersion('8.17.0', '9.10.0') - addVersion('8.17.1', '9.10.0') - addVersion('9.0.0', '10.0.0') - addVersion('9.0.1', '10.0.0') when: - def bwc = new BwcVersions(versionLines, v('9.0.1')) + def bwc = new BwcVersions(versionLines, v('8.16.1'), ['main', '8.x', '8.17', '8.16', '8.15', '7.17']) def unreleased = bwc.unreleased.collectEntries 
{ [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('8.17.1')): new UnreleasedVersionInfo(v('8.17.1'), '8.17', ':distribution:bwc:maintenance'), - (v('9.0.1')): new UnreleasedVersionInfo(v('9.0.1'), 'main', ':distribution'), + (v('7.17.1')): new UnreleasedVersionInfo(v('7.17.1'), '7.17', ':distribution:bwc:maintenance'), + (v('8.15.2')): new UnreleasedVersionInfo(v('8.15.2'), '8.15', ':distribution:bwc:bugfix'), + (v('8.16.1')): new UnreleasedVersionInfo(v('8.16.1'), '8.16', ':distribution'), ] - bwc.wireCompatible == [v('8.17.0'), v('8.17.1'), v('9.0.0'), v('9.0.1')] - bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.17.1'), v('9.0.0'), v('9.0.1')] + bwc.wireCompatible == [v('7.17.0'), v('7.17.1'), v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1')] + bwc.indexCompatible == [v('7.16.3'), v('7.17.0'), v('7.17.1'), v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1')] } - def "current version is next minor with no staged releases"() { + def "current version is second bugfix"() { given: + addVersion('7.16.3', '8.9.0') + addVersion('7.17.0', '8.9.0') + addVersion('7.17.1', '8.9.0') addVersion('8.14.0', '9.9.0') addVersion('8.14.1', '9.9.0') addVersion('8.14.2', '9.9.0') addVersion('8.15.0', '9.9.0') addVersion('8.15.1', '9.9.0') addVersion('8.15.2', '9.9.0') - addVersion('8.16.0', '9.10.0') - addVersion('8.16.1', '9.10.0') - addVersion('8.17.0', '9.10.0') - addVersion('8.17.1', '9.10.0') - addVersion('9.0.0', '10.0.0') - addVersion('9.0.1', '10.0.0') - addVersion('9.1.0', '10.1.0') when: - def bwc = new BwcVersions(versionLines, v('9.1.0')) + def bwc = new BwcVersions(versionLines, v('8.15.2'), ['main', '8.x', '8.17', '8.16', '8.15', '7.17']) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('8.17.1')): new UnreleasedVersionInfo(v('8.17.1'), '8.17', ':distribution:bwc:maintenance'), - (v('9.0.1')): new UnreleasedVersionInfo(v('9.0.1'), '9.0', ':distribution:bwc:bugfix'), - (v('9.1.0')): new UnreleasedVersionInfo(v('9.1.0'), 'main', ':distribution') + (v('7.17.1')): new UnreleasedVersionInfo(v('7.17.1'), '7.17', ':distribution:bwc:maintenance'), + (v('8.15.2')): new UnreleasedVersionInfo(v('8.15.2'), '8.15', ':distribution'), ] - bwc.wireCompatible == [v('8.17.0'), v('8.17.1'), v('9.0.0'), v('9.0.1'), v('9.1.0')] - bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.17.1'), v('9.0.0'), v('9.0.1'), v('9.1.0')] + bwc.wireCompatible == [v('7.17.0'), v('7.17.1'), v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2')] + bwc.indexCompatible == [v('7.16.3'), v('7.17.0'), v('7.17.1'), v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2')] } private void addVersion(String elasticsearch, String lucene) { diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/AbstractDistributionDownloadPluginTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/AbstractDistributionDownloadPluginTests.java index 639dec280ae9..7512fa20814c 100644 --- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/AbstractDistributionDownloadPluginTests.java +++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/AbstractDistributionDownloadPluginTests.java @@ -16,6 +16,7 @@ import 
java.io.File; import java.util.Arrays; +import java.util.List; public class AbstractDistributionDownloadPluginTests { protected static Project rootProject; @@ -28,22 +29,27 @@ public class AbstractDistributionDownloadPluginTests { protected static final Version BWC_STAGED_VERSION = Version.fromString("1.0.0"); protected static final Version BWC_BUGFIX_VERSION = Version.fromString("1.0.1"); protected static final Version BWC_MAINTENANCE_VERSION = Version.fromString("0.90.1"); + protected static final List<String> DEVELOPMENT_BRANCHES = Arrays.asList("main", "1.1", "1.0", "0.90"); protected static final BwcVersions BWC_MINOR = new BwcVersions( BWC_MAJOR_VERSION, - Arrays.asList(BWC_BUGFIX_VERSION, BWC_MINOR_VERSION, BWC_MAJOR_VERSION) + Arrays.asList(BWC_BUGFIX_VERSION, BWC_MINOR_VERSION, BWC_MAJOR_VERSION), + DEVELOPMENT_BRANCHES ); protected static final BwcVersions BWC_STAGED = new BwcVersions( BWC_MAJOR_VERSION, - Arrays.asList(BWC_MAINTENANCE_VERSION, BWC_STAGED_VERSION, BWC_MINOR_VERSION, BWC_MAJOR_VERSION) + Arrays.asList(BWC_MAINTENANCE_VERSION, BWC_STAGED_VERSION, BWC_MINOR_VERSION, BWC_MAJOR_VERSION), + DEVELOPMENT_BRANCHES ); protected static final BwcVersions BWC_BUGFIX = new BwcVersions( BWC_MAJOR_VERSION, - Arrays.asList(BWC_BUGFIX_VERSION, BWC_MINOR_VERSION, BWC_MAJOR_VERSION) + Arrays.asList(BWC_BUGFIX_VERSION, BWC_MINOR_VERSION, BWC_MAJOR_VERSION), + DEVELOPMENT_BRANCHES ); protected static final BwcVersions BWC_MAINTENANCE = new BwcVersions( BWC_MINOR_VERSION, - Arrays.asList(BWC_MAINTENANCE_VERSION, BWC_BUGFIX_VERSION, BWC_MINOR_VERSION) + Arrays.asList(BWC_MAINTENANCE_VERSION, BWC_BUGFIX_VERSION, BWC_MINOR_VERSION), + DEVELOPMENT_BRANCHES ); protected static String projectName(String base, boolean bundledJdk) { diff --git a/build-tools/src/testFixtures/groovy/org/elasticsearch/gradle/fixtures/AbstractGradleFuncTest.groovy b/build-tools/src/testFixtures/groovy/org/elasticsearch/gradle/fixtures/AbstractGradleFuncTest.groovy index f3f8e4703eba..07214b5fbf84 100644 --- a/build-tools/src/testFixtures/groovy/org/elasticsearch/gradle/fixtures/AbstractGradleFuncTest.groovy +++ b/build-tools/src/testFixtures/groovy/org/elasticsearch/gradle/fixtures/AbstractGradleFuncTest.groovy @@ -156,12 +156,12 @@ abstract class AbstractGradleFuncTest extends Specification { File internalBuild( List<String> extraPlugins = [], - String bugfix = "7.15.2", - String bugfixLucene = "8.9.0", - String staged = "7.16.0", - String stagedLucene = "8.10.0", - String minor = "8.0.0", - String minorLucene = "9.0.0" + String maintenance = "7.16.10", + String bugfix2 = "8.1.3", + String bugfix = "8.2.1", + String staged = "8.3.0", + String minor = "8.4.0", + String current = "9.0.0" ) { buildFile << """plugins { id 'elasticsearch.global-build-info' } @@ -172,15 +172,17 @@ abstract class AbstractGradleFuncTest extends Specification { import org.elasticsearch.gradle.internal.BwcVersions import org.elasticsearch.gradle.Version - Version currentVersion = Version.fromString("8.1.0") + Version currentVersion = Version.fromString("${current}") def versionList = [ + Version.fromString("$maintenance"), + Version.fromString("$bugfix2"), Version.fromString("$bugfix"), Version.fromString("$staged"), Version.fromString("$minor"), currentVersion ] - BwcVersions versions = new BwcVersions(currentVersion, versionList) + BwcVersions versions = new BwcVersions(currentVersion, versionList, ['main', '8.x', '8.3', '8.2', '8.1', '7.16']) buildParams.getBwcVersionsProperty().set(versions) """ } diff --git a/distribution/bwc/bugfix2/build.gradle
b/distribution/bwc/bugfix2/build.gradle new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/docs/changelog/114445.yaml b/docs/changelog/114445.yaml new file mode 100644 index 000000000000..afbc080d1e0b --- /dev/null +++ b/docs/changelog/114445.yaml @@ -0,0 +1,6 @@ +pr: 114445 +summary: Wrap jackson exception on malformed json string +area: Infra/Core +type: bug +issues: + - 114142 diff --git a/docs/changelog/117792.yaml b/docs/changelog/117792.yaml new file mode 100644 index 000000000000..2d7ddda1ace4 --- /dev/null +++ b/docs/changelog/117792.yaml @@ -0,0 +1,6 @@ +pr: 117792 +summary: Address mapping and compute engine runtime field issues +area: Mapping +type: bug +issues: + - 117644 diff --git a/docs/changelog/117914.yaml b/docs/changelog/117914.yaml new file mode 100644 index 000000000000..da58ed7bb04b --- /dev/null +++ b/docs/changelog/117914.yaml @@ -0,0 +1,5 @@ +pr: 117914 +summary: Fix for propagating filters from compound to inner retrievers +area: Ranking +type: bug +issues: [] diff --git a/docs/changelog/117963.yaml b/docs/changelog/117963.yaml new file mode 100644 index 000000000000..4a50dc175786 --- /dev/null +++ b/docs/changelog/117963.yaml @@ -0,0 +1,5 @@ +pr: 117963 +summary: '`SearchServiceTests.testParseSourceValidation` failure' +area: Search +type: bug +issues: [] diff --git a/docs/reference/inference/put-inference.asciidoc b/docs/reference/inference/put-inference.asciidoc index ed93c290b6ad..4f82889f562d 100644 --- a/docs/reference/inference/put-inference.asciidoc +++ b/docs/reference/inference/put-inference.asciidoc @@ -10,7 +10,6 @@ Creates an {infer} endpoint to perform an {infer} task. * For built-in models and models uploaded through Eland, the {infer} APIs offer an alternative way to use and manage trained models. However, if you do not plan to use the {infer} APIs to use these models or if you want to use non-NLP models, use the <>. ==== - [discrete] [[put-inference-api-request]] ==== {api-request-title} @@ -47,6 +46,14 @@ Refer to the service list in the <> API. In the response, look for `"state": "fully_allocated"` and ensure the `"allocation_count"` matches the `"target_allocation_count"`. +* Avoid creating multiple endpoints for the same model unless required, as each endpoint consumes significant resources. +==== + + The following services are available through the {infer} API. You can find the available task types next to the service name. 
Click the links to review the configuration details of the services: diff --git a/libs/entitlement/src/main/java/module-info.java b/libs/entitlement/src/main/java/module-info.java index 54075ba60bbe..b8a125b98e64 100644 --- a/libs/entitlement/src/main/java/module-info.java +++ b/libs/entitlement/src/main/java/module-info.java @@ -17,6 +17,7 @@ requires static org.elasticsearch.entitlement.bridge; // At runtime, this will be in java.base exports org.elasticsearch.entitlement.runtime.api; + exports org.elasticsearch.entitlement.runtime.policy; exports org.elasticsearch.entitlement.instrumentation; exports org.elasticsearch.entitlement.bootstrap to org.elasticsearch.server; exports org.elasticsearch.entitlement.initialization to java.base; diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java index 0ffab5f93969..fb694308466c 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java @@ -18,6 +18,8 @@ import org.elasticsearch.entitlement.instrumentation.MethodKey; import org.elasticsearch.entitlement.instrumentation.Transformer; import org.elasticsearch.entitlement.runtime.api.ElasticsearchEntitlementChecker; +import org.elasticsearch.entitlement.runtime.policy.CreateClassLoaderEntitlement; +import org.elasticsearch.entitlement.runtime.policy.ExitVMEntitlement; import org.elasticsearch.entitlement.runtime.policy.Policy; import org.elasticsearch.entitlement.runtime.policy.PolicyManager; import org.elasticsearch.entitlement.runtime.policy.PolicyParser; @@ -86,9 +88,11 @@ private static Class<?> internalNameToClass(String internalName) { private static PolicyManager createPolicyManager() throws IOException { Map<String, Policy> pluginPolicies = createPluginPolicies(EntitlementBootstrap.bootstrapArgs().pluginData()); - // TODO: What should the name be? // TODO(ES-10031): Decide what goes in the elasticsearch default policy and extend it - var serverPolicy = new Policy("server", List.of()); + var serverPolicy = new Policy( + "server", + List.of(new Scope("org.elasticsearch.server", List.of(new ExitVMEntitlement(), new CreateClassLoaderEntitlement()))) + ); return new PolicyManager(serverPolicy, pluginPolicies, EntitlementBootstrap.bootstrapArgs().pluginResolver()); } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java index 28a080470c04..aa63b630ed7c 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java @@ -10,7 +10,6 @@ package org.elasticsearch.entitlement.runtime.api; import org.elasticsearch.entitlement.bridge.EntitlementChecker; -import org.elasticsearch.entitlement.runtime.policy.FlagEntitlementType; import org.elasticsearch.entitlement.runtime.policy.PolicyManager; import java.net.URL; @@ -30,27 +29,27 @@ public ElasticsearchEntitlementChecker(PolicyManager policyManager) { @Override public void check$java_lang_System$exit(Class<?> callerClass, int status) { - policyManager.checkFlagEntitlement(callerClass, FlagEntitlementType.SYSTEM_EXIT); + policyManager.checkExitVM(callerClass); } @Override public void check$java_net_URLClassLoader$(Class<?> callerClass, URL[] urls) { - policyManager.checkFlagEntitlement(callerClass, FlagEntitlementType.CREATE_CLASSLOADER); + policyManager.checkCreateClassLoader(callerClass); } @Override public void check$java_net_URLClassLoader$(Class<?> callerClass, URL[] urls, ClassLoader parent) { - policyManager.checkFlagEntitlement(callerClass, FlagEntitlementType.CREATE_CLASSLOADER); + policyManager.checkCreateClassLoader(callerClass); } @Override public void check$java_net_URLClassLoader$(Class<?> callerClass, URL[] urls, ClassLoader parent, URLStreamHandlerFactory factory) { - policyManager.checkFlagEntitlement(callerClass, FlagEntitlementType.CREATE_CLASSLOADER); + policyManager.checkCreateClassLoader(callerClass); } @Override public void check$java_net_URLClassLoader$(Class<?> callerClass, String name, URL[] urls, ClassLoader parent) { - policyManager.checkFlagEntitlement(callerClass, FlagEntitlementType.CREATE_CLASSLOADER); + policyManager.checkCreateClassLoader(callerClass); } @Override @@ -61,6 +60,6 @@ public ElasticsearchEntitlementChecker(PolicyManager policyManager) { ClassLoader parent, URLStreamHandlerFactory factory ) { - policyManager.checkFlagEntitlement(callerClass, FlagEntitlementType.CREATE_CLASSLOADER); + policyManager.checkCreateClassLoader(callerClass); } } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/CreateClassLoaderEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/CreateClassLoaderEntitlement.java index 708e0b87711f..138515be9ffc 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/CreateClassLoaderEntitlement.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/CreateClassLoaderEntitlement.java @@ -12,5 +12,4 @@ public class CreateClassLoaderEntitlement implements Entitlement { @ExternalEntitlement public CreateClassLoaderEntitlement() {} - } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FlagEntitlementType.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/ExitVMEntitlement.java similarity index 79% rename from libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FlagEntitlementType.java rename to libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/ExitVMEntitlement.java index d40235ee1216..c4a8fc683358 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FlagEntitlementType.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/ExitVMEntitlement.java @@ -9,7 +9,7 @@ package org.elasticsearch.entitlement.runtime.policy; -public enum FlagEntitlementType { - SYSTEM_EXIT, - CREATE_CLASSLOADER; -} +/** + * Internal policy type (not-parseable -- not available to plugins). + */ +public class ExitVMEntitlement implements Entitlement {} diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileEntitlement.java index 8df199591d3e..d0837bc09618 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileEntitlement.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileEntitlement.java @@ -20,6 +20,9 @@ public class FileEntitlement implements Entitlement { public static final int READ_ACTION = 0x1; public static final int WRITE_ACTION = 0x2; + public static final String READ = "read"; + public static final String WRITE = "write"; + private final String path; private final int actions; @@ -29,12 +32,12 @@ public FileEntitlement(String path, List<String> actionsList) { int actionsInt = 0; for (String actionString : actionsList) { - if ("read".equals(actionString)) { + if (READ.equals(actionString)) { if ((actionsInt & READ_ACTION) == READ_ACTION) { throw new IllegalArgumentException("file action [read] specified multiple times"); } actionsInt |= READ_ACTION; - } else if ("write".equals(actionString)) { + } else if (WRITE.equals(actionString)) { if ((actionsInt & WRITE_ACTION) == WRITE_ACTION) { throw new IllegalArgumentException("file action [write] specified multiple times"); } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java index b3fb5b75a1d5..a77c86d5ffd0 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java @@ -17,17 +17,45 @@ import java.lang.module.ModuleFinder; import java.lang.module.ModuleReference; +import java.util.ArrayList; import java.util.Collections; +import java.util.HashMap; +import java.util.IdentityHashMap; +import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; +import java.util.stream.Stream; public class PolicyManager { private static final Logger logger = LogManager.getLogger(ElasticsearchEntitlementChecker.class); + static class ModuleEntitlements { + public static final ModuleEntitlements NONE = new ModuleEntitlements(List.of()); + private final IdentityHashMap<Class<? extends Entitlement>, List<Entitlement>> entitlementsByType; + + ModuleEntitlements(List<Entitlement> entitlements) { + this.entitlementsByType = entitlements.stream() + .collect(Collectors.toMap(Entitlement::getClass, e -> new ArrayList<>(List.of(e)), (a, b) -> { + a.addAll(b); + return a; + }, IdentityHashMap::new)); + } + + public boolean hasEntitlement(Class<? extends Entitlement> entitlementClass) { + return entitlementsByType.containsKey(entitlementClass); + } + + public <E extends Entitlement> Stream<E> getEntitlements(Class<E> entitlementClass) { + return entitlementsByType.get(entitlementClass).stream().map(entitlementClass::cast); + } + } + + final Map<Module, ModuleEntitlements> moduleEntitlementsMap = new HashMap<>(); + protected final Policy serverPolicy; protected final Map<String, Policy> pluginPolicies; private final Function<Class<?>, String> pluginResolver; @@ -56,27 +84,110 @@ public PolicyManager(Policy defaultPolicy, Map<String, Policy> pluginPolicies, F this.pluginResolver = pluginResolver; } - public void checkFlagEntitlement(Class<?> callerClass, FlagEntitlementType type) { + private static List<Entitlement> lookupEntitlementsForModule(Policy policy, String moduleName) { + for (int i = 0; i < policy.scopes.size(); ++i) { + var scope = policy.scopes.get(i); + if (scope.name.equals(moduleName)) { + return scope.entitlements; + } + } + return null; + } + + public void checkExitVM(Class<?> callerClass) { + checkEntitlementPresent(callerClass, ExitVMEntitlement.class); + } + + public void checkCreateClassLoader(Class<?> callerClass) { + checkEntitlementPresent(callerClass, CreateClassLoaderEntitlement.class); + } + + private void checkEntitlementPresent(Class<?> callerClass, Class<? extends Entitlement> entitlementClass) { var requestingModule = requestingModule(callerClass); if (isTriviallyAllowed(requestingModule)) { return; } - // TODO: real policy check. For now, we only allow our hardcoded System.exit policy for server. - // TODO: this will be checked using policies - if (requestingModule.isNamed() - && requestingModule.getName().equals("org.elasticsearch.server") - && (type == FlagEntitlementType.SYSTEM_EXIT || type == FlagEntitlementType.CREATE_CLASSLOADER)) { - logger.debug("Allowed: caller [{}] in module [{}] has entitlement [{}]", callerClass, requestingModule.getName(), type); + ModuleEntitlements entitlements = getEntitlementsOrThrow(callerClass, requestingModule); + if (entitlements.hasEntitlement(entitlementClass)) { + logger.debug( + () -> Strings.format( + "Entitled: caller [%s], module [%s], type [%s]", + callerClass, + requestingModule.getName(), + entitlementClass.getSimpleName() + ) + ); return; } - - // TODO: plugins policy check using pluginResolver and pluginPolicies throw new NotEntitledException( - Strings.format("Missing entitlement [%s] for caller [%s] in module [%s]", type, callerClass, requestingModule.getName()) + Strings.format( + "Missing entitlement: caller [%s], module [%s], type [%s]", + callerClass, + requestingModule.getName(), + entitlementClass.getSimpleName() + ) ); } + ModuleEntitlements getEntitlementsOrThrow(Class<?> callerClass, Module requestingModule) { + ModuleEntitlements cachedEntitlement = moduleEntitlementsMap.get(requestingModule); + if (cachedEntitlement != null) { + if (cachedEntitlement == ModuleEntitlements.NONE) { + throw new NotEntitledException(buildModuleNoPolicyMessage(callerClass, requestingModule) + "[CACHED]"); + } + return cachedEntitlement; + } + + if (isServerModule(requestingModule)) { + var scopeName = requestingModule.getName(); + return getModuleEntitlementsOrThrow(callerClass, requestingModule, serverPolicy, scopeName); + } + + // plugins + var pluginName = pluginResolver.apply(callerClass); + if (pluginName != null) { + var pluginPolicy = pluginPolicies.get(pluginName); + if (pluginPolicy != null) { + final String scopeName; + if (requestingModule.isNamed() == false) { + scopeName = ALL_UNNAMED; + } else { + scopeName = requestingModule.getName(); + } + return getModuleEntitlementsOrThrow(callerClass, requestingModule, pluginPolicy, scopeName); + } + } + + moduleEntitlementsMap.put(requestingModule, ModuleEntitlements.NONE); + throw new NotEntitledException(buildModuleNoPolicyMessage(callerClass, requestingModule)); + } + + private static String buildModuleNoPolicyMessage(Class<?> callerClass, Module requestingModule) { + return Strings.format("Missing entitlement policy: caller [%s], module [%s]", callerClass, requestingModule.getName()); + } + + private ModuleEntitlements getModuleEntitlementsOrThrow(Class<?> callerClass, Module module, Policy policy, String moduleName) { + var entitlements = lookupEntitlementsForModule(policy, moduleName); + if (entitlements == null) { + // Module without entitlements - remember we don't have any + moduleEntitlementsMap.put(module, ModuleEntitlements.NONE); + throw new NotEntitledException(buildModuleNoPolicyMessage(callerClass, module)); + } + // We have a policy for this module + var classEntitlements = createClassEntitlements(entitlements); + moduleEntitlementsMap.put(module, classEntitlements); + return classEntitlements; + } + + private static boolean isServerModule(Module requestingModule) { + return requestingModule.isNamed() && requestingModule.getLayer() == ModuleLayer.boot(); + } + + private ModuleEntitlements createClassEntitlements(List<Entitlement> entitlements) { + return new ModuleEntitlements(entitlements); + } + private static Module requestingModule(Class<?> callerClass) { if (callerClass != null) { Module callerModule = callerClass.getModule(); @@ -102,10 +213,10 @@ private static Module requestingModule(Class<?> callerClass) { private static boolean isTriviallyAllowed(Module requestingModule) { if (requestingModule == null) { - logger.debug("Trivially allowed: entire call stack is in composed of classes in system modules"); + logger.debug("Entitlement trivially allowed: entire call stack is in composed of classes in system modules"); return true; } - logger.trace("Not trivially allowed"); + logger.trace("Entitlement not trivially allowed"); return false; } diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java new file mode 100644 index 000000000000..45bdf2e45782 --- /dev/null +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java @@ -0,0 +1,247 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1".
+ */ + +package org.elasticsearch.entitlement.runtime.policy; + +import org.elasticsearch.entitlement.runtime.api.NotEntitledException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.compiler.InMemoryJavaCompiler; +import org.elasticsearch.test.jar.JarUtils; + +import java.io.IOException; +import java.lang.module.Configuration; +import java.lang.module.ModuleFinder; +import java.nio.file.Path; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static java.util.Map.entry; +import static org.elasticsearch.entitlement.runtime.policy.PolicyManager.ALL_UNNAMED; +import static org.elasticsearch.test.LambdaMatchers.transformedMatch; +import static org.hamcrest.Matchers.aMapWithSize; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.endsWith; +import static org.hamcrest.Matchers.hasEntry; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.sameInstance; + +@ESTestCase.WithoutSecurityManager +public class PolicyManagerTests extends ESTestCase { + + public void testGetEntitlementsThrowsOnMissingPluginUnnamedModule() { + var policyManager = new PolicyManager( + createEmptyTestServerPolicy(), + Map.of("plugin1", createPluginPolicy("plugin.module")), + c -> "plugin1" + ); + + // Any class from the current module (unnamed) will do + var callerClass = this.getClass(); + var requestingModule = callerClass.getModule(); + + var ex = assertThrows( + "No policy for the unnamed module", + NotEntitledException.class, + () -> policyManager.getEntitlementsOrThrow(callerClass, requestingModule) + ); + + assertEquals( + "Missing entitlement policy: caller [class org.elasticsearch.entitlement.runtime.policy.PolicyManagerTests], module [null]", + ex.getMessage() + ); + assertThat(policyManager.moduleEntitlementsMap, hasEntry(requestingModule, PolicyManager.ModuleEntitlements.NONE)); + } + + public void testGetEntitlementsThrowsOnMissingPolicyForPlugin() { + var policyManager = new PolicyManager(createEmptyTestServerPolicy(), Map.of(), c -> "plugin1"); + + // Any class from the current module (unnamed) will do + var callerClass = this.getClass(); + var requestingModule = callerClass.getModule(); + + var ex = assertThrows( + "No policy for this plugin", + NotEntitledException.class, + () -> policyManager.getEntitlementsOrThrow(callerClass, requestingModule) + ); + + assertEquals( + "Missing entitlement policy: caller [class org.elasticsearch.entitlement.runtime.policy.PolicyManagerTests], module [null]", + ex.getMessage() + ); + assertThat(policyManager.moduleEntitlementsMap, hasEntry(requestingModule, PolicyManager.ModuleEntitlements.NONE)); + } + + public void testGetEntitlementsFailureIsCached() { + var policyManager = new PolicyManager(createEmptyTestServerPolicy(), Map.of(), c -> "plugin1"); + + // Any class from the current module (unnamed) will do + var callerClass = this.getClass(); + var requestingModule = callerClass.getModule(); + + assertThrows(NotEntitledException.class, () -> policyManager.getEntitlementsOrThrow(callerClass, requestingModule)); + assertThat(policyManager.moduleEntitlementsMap, hasEntry(requestingModule, PolicyManager.ModuleEntitlements.NONE)); + + // A second time + var ex = assertThrows(NotEntitledException.class, () -> policyManager.getEntitlementsOrThrow(callerClass, requestingModule)); + + assertThat(ex.getMessage(), endsWith("[CACHED]")); + // Nothing new in the map + 
assertThat(policyManager.moduleEntitlementsMap, aMapWithSize(1)); + } + + public void testGetEntitlementsReturnsEntitlementsForPluginUnnamedModule() { + var policyManager = new PolicyManager( + createEmptyTestServerPolicy(), + Map.ofEntries(entry("plugin2", createPluginPolicy(ALL_UNNAMED))), + c -> "plugin2" + ); + + // Any class from the current module (unnamed) will do + var callerClass = this.getClass(); + var requestingModule = callerClass.getModule(); + + var entitlements = policyManager.getEntitlementsOrThrow(callerClass, requestingModule); + assertThat(entitlements.hasEntitlement(CreateClassLoaderEntitlement.class), is(true)); + } + + public void testGetEntitlementsThrowsOnMissingPolicyForServer() throws ClassNotFoundException { + var policyManager = new PolicyManager(createTestServerPolicy("example"), Map.of(), c -> null); + + // Tests do not run modular, so we cannot use a server class. + // But we know that in production code the server module and its classes are in the boot layer. + // So we use a random module in the boot layer, and a random class from that module (not java.base -- it is + // loaded too early) to mimic a class that would be in the server module. + var mockServerClass = ModuleLayer.boot().findLoader("jdk.httpserver").loadClass("com.sun.net.httpserver.HttpServer"); + var requestingModule = mockServerClass.getModule(); + + var ex = assertThrows( + "No policy for this module in server", + NotEntitledException.class, + () -> policyManager.getEntitlementsOrThrow(mockServerClass, requestingModule) + ); + + assertEquals( + "Missing entitlement policy: caller [class com.sun.net.httpserver.HttpServer], module [jdk.httpserver]", + ex.getMessage() + ); + assertThat(policyManager.moduleEntitlementsMap, hasEntry(requestingModule, PolicyManager.ModuleEntitlements.NONE)); + } + + public void testGetEntitlementsReturnsEntitlementsForServerModule() throws ClassNotFoundException { + var policyManager = new PolicyManager(createTestServerPolicy("jdk.httpserver"), Map.of(), c -> null); + + // Tests do not run modular, so we cannot use a server class. + // But we know that in production code the server module and its classes are in the boot layer. + // So we use a random module in the boot layer, and a random class from that module (not java.base -- it is + // loaded too early) to mimic a class that would be in the server module. 
+ var mockServerClass = ModuleLayer.boot().findLoader("jdk.httpserver").loadClass("com.sun.net.httpserver.HttpServer"); + var requestingModule = mockServerClass.getModule(); + + var entitlements = policyManager.getEntitlementsOrThrow(mockServerClass, requestingModule); + assertThat(entitlements.hasEntitlement(CreateClassLoaderEntitlement.class), is(true)); + assertThat(entitlements.hasEntitlement(ExitVMEntitlement.class), is(true)); + } + + public void testGetEntitlementsReturnsEntitlementsForPluginModule() throws IOException, ClassNotFoundException { + final Path home = createTempDir(); + + Path jar = creteMockPluginJar(home); + + var policyManager = new PolicyManager( + createEmptyTestServerPolicy(), + Map.of("mock-plugin", createPluginPolicy("org.example.plugin")), + c -> "mock-plugin" + ); + + var layer = createLayerForJar(jar, "org.example.plugin"); + var mockPluginClass = layer.findLoader("org.example.plugin").loadClass("q.B"); + var requestingModule = mockPluginClass.getModule(); + + var entitlements = policyManager.getEntitlementsOrThrow(mockPluginClass, requestingModule); + assertThat(entitlements.hasEntitlement(CreateClassLoaderEntitlement.class), is(true)); + assertThat( + entitlements.getEntitlements(FileEntitlement.class).toList(), + contains(transformedMatch(FileEntitlement::toString, containsString("/test/path"))) + ); + } + + public void testGetEntitlementsResultIsCached() { + var policyManager = new PolicyManager( + createEmptyTestServerPolicy(), + Map.ofEntries(entry("plugin2", createPluginPolicy(ALL_UNNAMED))), + c -> "plugin2" + ); + + // Any class from the current module (unnamed) will do + var callerClass = this.getClass(); + var requestingModule = callerClass.getModule(); + + var entitlements = policyManager.getEntitlementsOrThrow(callerClass, requestingModule); + assertThat(entitlements.hasEntitlement(CreateClassLoaderEntitlement.class), is(true)); + assertThat(policyManager.moduleEntitlementsMap, aMapWithSize(1)); + var cachedResult = policyManager.moduleEntitlementsMap.values().stream().findFirst().get(); + var entitlementsAgain = policyManager.getEntitlementsOrThrow(callerClass, requestingModule); + + // Nothing new in the map + assertThat(policyManager.moduleEntitlementsMap, aMapWithSize(1)); + assertThat(entitlementsAgain, sameInstance(cachedResult)); + } + + private static Policy createEmptyTestServerPolicy() { + return new Policy("server", List.of()); + } + + private static Policy createTestServerPolicy(String scopeName) { + return new Policy("server", List.of(new Scope(scopeName, List.of(new ExitVMEntitlement(), new CreateClassLoaderEntitlement())))); + } + + private static Policy createPluginPolicy(String... 
pluginModules) {
+        return new Policy(
+            "plugin",
+            Arrays.stream(pluginModules)
+                .map(
+                    name -> new Scope(
+                        name,
+                        List.of(new FileEntitlement("/test/path", List.of(FileEntitlement.READ)), new CreateClassLoaderEntitlement())
+                    )
+                )
+                .toList()
+        );
+    }
+
+    private static Path creteMockPluginJar(Path home) throws IOException {
+        Path jar = home.resolve("mock-plugin.jar");
+
+        Map<String, CharSequence> sources = Map.ofEntries(
+            entry("module-info", "module org.example.plugin { exports q; }"),
+            entry("q.B", "package q; public class B { }")
+        );
+
+        var classToBytes = InMemoryJavaCompiler.compile(sources);
+        JarUtils.createJarWithEntries(
+            jar,
+            Map.ofEntries(entry("module-info.class", classToBytes.get("module-info")), entry("q/B.class", classToBytes.get("q.B")))
+        );
+        return jar;
+    }
+
+    private static ModuleLayer createLayerForJar(Path jar, String moduleName) {
+        Configuration cf = ModuleLayer.boot().configuration().resolve(ModuleFinder.of(jar), ModuleFinder.of(), Set.of(moduleName));
+        var moduleController = ModuleLayer.defineModulesWithOneLoader(
+            cf,
+            List.of(ModuleLayer.boot()),
+            ClassLoader.getPlatformClassLoader()
+        );
+        return moduleController.layer();
+    }
+}
diff --git a/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/json/JsonXContentParser.java b/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/json/JsonXContentParser.java
index d42c56845d03..38ef8bc2e4ef 100644
--- a/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/json/JsonXContentParser.java
+++ b/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/json/JsonXContentParser.java
@@ -108,7 +108,11 @@ public String text() throws IOException {
         if (currentToken().isValue() == false) {
             throwOnNoText();
         }
-        return parser.getText();
+        try {
+            return parser.getText();
+        } catch (JsonParseException e) {
+            throw newXContentParseException(e);
+        }
     }
 
     private void throwOnNoText() {
diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/EnterpriseGeoIpTaskState.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/EnterpriseGeoIpTaskState.java
index c4d0aef0183e..c128af69009b 100644
--- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/EnterpriseGeoIpTaskState.java
+++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/EnterpriseGeoIpTaskState.java
@@ -123,7 +123,7 @@ public String getWriteableName() {
 
     @Override
     public TransportVersion getMinimalSupportedVersion() {
-        return TransportVersions.ENTERPRISE_GEOIP_DOWNLOADER;
+        return TransportVersions.V_8_16_0;
     }
 
     @Override
diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpTaskState.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpTaskState.java
index 47ca79e3cb3b..96525d427d3e 100644
--- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpTaskState.java
+++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpTaskState.java
@@ -44,7 +44,7 @@ public class GeoIpTaskState implements PersistentTaskState, VersionedNamedWriteable {
 
     private static boolean includeSha256(TransportVersion version) {
-        return version.isPatchFrom(TransportVersions.V_8_15_0) || version.onOrAfter(TransportVersions.ENTERPRISE_GEOIP_DOWNLOADER);
+        return version.onOrAfter(TransportVersions.V_8_15_0);
     }
 
     private static final ParseField DATABASES = new ParseField("databases");
diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpMetadata.java
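Before the geoip hunks: a compact view of how the new PolicyManager pieces compose, mirroring the test fixtures above. Policy, Scope, the entitlement classes, and ALL_UNNAMED are the PR's own types; the method wrapper and the caller class are illustrative only, so treat this as a sketch rather than the PR's API surface.

    // Sketch (not from the PR): wiring the new pieces together the way the tests above do.
    static void demo(Class<?> somePluginClass) {
        var policyManager = new PolicyManager(
            new Policy("server", List.of()),                                // empty server policy
            Map.of("plugin1", new Policy("plugin", List.of(
                new Scope(ALL_UNNAMED, List.of(new CreateClassLoaderEntitlement()))
            ))),
            callerClass -> "plugin1"                                        // resolve every caller to plugin1
        );

        // The first check resolves the plugin scope and caches a ModuleEntitlements entry
        // keyed by the caller's java.lang.Module; the second check is served from
        // moduleEntitlementsMap and returns the same instance (testGetEntitlementsResultIsCached).
        policyManager.checkCreateClassLoader(somePluginClass);
        policyManager.checkCreateClassLoader(somePluginClass);

        // A caller that resolves to no policy is cached as ModuleEntitlements.NONE and keeps
        // failing with a NotEntitledException whose message ends in "[CACHED]"
        // (testGetEntitlementsFailureIsCached).
    }

Negative caching is the design point here: a module with no policy is remembered as the NONE sentinel, so repeated denials skip the policy walk entirely.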
b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpMetadata.java
index b6e73f3f33f7..a50fe7dee900 100644
--- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpMetadata.java
+++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpMetadata.java
@@ -69,7 +69,7 @@ public String getWriteableName() {
 
     @Override
     public TransportVersion getMinimalSupportedVersion() {
-        return TransportVersions.ENTERPRISE_GEOIP_DOWNLOADER;
+        return TransportVersions.V_8_16_0;
     }
 
     public Map<String, DatabaseConfigurationMetadata> getDatabases() {
@@ -138,7 +138,7 @@ public String getWriteableName() {
 
     @Override
     public TransportVersion getMinimalSupportedVersion() {
-        return TransportVersions.ENTERPRISE_GEOIP_DOWNLOADER;
+        return TransportVersions.V_8_16_0;
     }
 }
diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfiguration.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfiguration.java
index a26364f9305e..aa48c73cf1d7 100644
--- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfiguration.java
+++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfiguration.java
@@ -138,7 +138,7 @@ public DatabaseConfiguration(StreamInput in) throws IOException {
     }
 
     private static Provider readProvider(StreamInput in) throws IOException {
-        if (in.getTransportVersion().onOrAfter(TransportVersions.INGEST_GEO_DATABASE_PROVIDERS)) {
+        if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) {
             return in.readNamedWriteable(Provider.class);
         } else {
             // prior to the above version, everything was always a maxmind, so this half of the if is logical
@@ -154,7 +154,7 @@ public static DatabaseConfiguration parse(XContentParser parser, String id) {
     public void writeTo(StreamOutput out) throws IOException {
         out.writeString(id);
         out.writeString(name);
-        if (out.getTransportVersion().onOrAfter(TransportVersions.INGEST_GEO_DATABASE_PROVIDERS)) {
+        if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) {
             out.writeNamedWriteable(provider);
         } else {
             if (provider instanceof Maxmind maxmind) {
diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java
index 3fd5cc44a340..1d39b993cef9 100644
--- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java
+++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java
@@ -40,6 +40,7 @@
 import io.netty.handler.codec.http.HttpUtil;
 import io.netty.handler.codec.http.HttpVersion;
 
+import org.apache.http.ConnectionClosedException;
 import org.apache.http.HttpHost;
 import org.apache.lucene.util.SetOnce;
 import org.elasticsearch.ElasticsearchException;
@@ -48,6 +49,7 @@
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.bulk.IncrementalBulkService;
 import org.elasticsearch.action.support.ActionTestUtils;
+import org.elasticsearch.action.support.PlainActionFuture;
 import org.elasticsearch.action.support.SubscribableListener;
 import org.elasticsearch.client.Request;
 import org.elasticsearch.client.RestClient;
@@ -100,6 +102,7 @@
 import java.util.Collections;
 import java.util.List;
 import java.util.Set;
+import java.util.concurrent.CancellationException;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
@@ -110,6 +113,7 @@
 import static com.carrotsearch.randomizedtesting.RandomizedTest.getRandom;
 import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_ORIGIN;
 import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ENABLED;
+import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_SERVER_SHUTDOWN_GRACE_PERIOD;
 import static org.elasticsearch.rest.RestStatus.BAD_REQUEST;
 import static org.elasticsearch.rest.RestStatus.OK;
 import static org.elasticsearch.rest.RestStatus.UNAUTHORIZED;
@@ -1039,8 +1043,16 @@ public void dispatchBadRequest(final RestChannel channel, final ThreadContext th
         }
     }
 
-    public void testRespondAfterClose() throws Exception {
-        final String url = "/thing";
+    public void testRespondAfterServiceCloseWithClientCancel() throws Exception {
+        runRespondAfterServiceCloseTest(true);
+    }
+
+    public void testRespondAfterServiceCloseWithServerCancel() throws Exception {
+        runRespondAfterServiceCloseTest(false);
+    }
+
+    private void runRespondAfterServiceCloseTest(boolean clientCancel) throws Exception {
+        final String url = "/" + randomIdentifier();
         final CountDownLatch responseReleasedLatch = new CountDownLatch(1);
         final SubscribableListener<Void> transportClosedFuture = new SubscribableListener<>();
         final CountDownLatch handlingRequestLatch = new CountDownLatch(1);
@@ -1066,7 +1078,9 @@ public void dispatchBadRequest(final RestChannel channel, final ThreadContext th
         try (
             Netty4HttpServerTransport transport = new Netty4HttpServerTransport(
-                Settings.EMPTY,
+                clientCancel
+                    ? Settings.EMPTY
+                    : Settings.builder().put(SETTING_HTTP_SERVER_SHUTDOWN_GRACE_PERIOD.getKey(), TimeValue.timeValueMillis(1)).build(),
                 networkService,
                 threadPool,
                 xContentRegistry(),
@@ -1082,11 +1096,24 @@
             transport.start();
             final var address = randomFrom(transport.boundAddress().boundAddresses()).address();
             try (var client = RestClient.builder(new HttpHost(address.getAddress(), address.getPort())).build()) {
-                client.performRequestAsync(new Request("GET", url), ActionTestUtils.wrapAsRestResponseListener(ActionListener.noop()));
+                final var responseExceptionFuture = new PlainActionFuture<Exception>();
+                final var cancellable = client.performRequestAsync(
+                    new Request("GET", url),
+                    ActionTestUtils.wrapAsRestResponseListener(ActionTestUtils.assertNoSuccessListener(responseExceptionFuture::onResponse))
+                );
                 safeAwait(handlingRequestLatch);
+                if (clientCancel) {
+                    threadPool.generic().execute(cancellable::cancel);
+                }
                 transport.close();
                 transportClosedFuture.onResponse(null);
                 safeAwait(responseReleasedLatch);
+                final var responseException = safeGet(responseExceptionFuture);
+                if (clientCancel) {
+                    assertThat(responseException, instanceOf(CancellationException.class));
+                } else {
+                    assertThat(responseException, instanceOf(ConnectionClosedException.class));
+                }
             }
         }
     }
diff --git a/muted-tests.yml b/muted-tests.yml
index 0b400f420e86..ee5e3dd42236 100644
--- a/muted-tests.yml
+++ b/muted-tests.yml
@@ -2,12 +2,6 @@ tests:
 - class: "org.elasticsearch.client.RestClientSingleHostIntegTests"
   issue: "https://github.com/elastic/elasticsearch/issues/102717"
   method: "testRequestResetAndAbort"
-- class: org.elasticsearch.xpack.restart.FullClusterRestartIT
-  method: testSingleDoc {cluster=UPGRADED}
-  issue: https://github.com/elastic/elasticsearch/issues/111434
-- class: org.elasticsearch.xpack.restart.FullClusterRestartIT
-  method:
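Setting the muted-tests hunk aside for a moment: the rewritten Netty4 test above exercises two shutdown paths, the client cancelling its in-flight request (expect CancellationException) or the server's 1 ms grace period lapsing so the connection closes (expect ConnectionClosedException). Condensed from the hunk, with the surrounding fixture (client, threadPool, transport, latches) assumed from the test body, the failure-capture idiom is:

    // Condensed sketch of the test's failure-capture pattern; not self-contained.
    final var responseExceptionFuture = new PlainActionFuture<Exception>();
    final var cancellable = client.performRequestAsync(
        new Request("GET", url),
        // assertNoSuccessListener fails the test if the request unexpectedly succeeds,
        // otherwise it hands the exception to the future.
        ActionTestUtils.wrapAsRestResponseListener(ActionTestUtils.assertNoSuccessListener(responseExceptionFuture::onResponse))
    );
    if (clientCancel) {
        threadPool.generic().execute(cancellable::cancel);   // client-side abort
    }
    transport.close();                                       // server-side shutdown
    assertThat(
        safeGet(responseExceptionFuture),
        instanceOf(clientCancel ? CancellationException.class : ConnectionClosedException.class)
    );

Capturing the expected failure in a future keeps the assertion on the test thread while the request fails asynchronously.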
testDataStreams {cluster=UPGRADED} - issue: https://github.com/elastic/elasticsearch/issues/111448 - class: org.elasticsearch.smoketest.WatcherYamlRestIT method: test {p0=watcher/usage/10_basic/Test watcher usage stats output} issue: https://github.com/elastic/elasticsearch/issues/112189 @@ -103,9 +97,6 @@ tests: - class: org.elasticsearch.search.StressSearchServiceReaperIT method: testStressReaper issue: https://github.com/elastic/elasticsearch/issues/115816 -- class: org.elasticsearch.search.SearchServiceTests - method: testParseSourceValidation - issue: https://github.com/elastic/elasticsearch/issues/115936 - class: org.elasticsearch.xpack.application.connector.ConnectorIndexServiceTests issue: https://github.com/elastic/elasticsearch/issues/116087 - class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT @@ -245,6 +236,17 @@ tests: - class: org.elasticsearch.xpack.restart.QueryBuilderBWCIT method: testQueryBuilderBWC {p0=UPGRADED} issue: https://github.com/elastic/elasticsearch/issues/116989 +- class: org.elasticsearch.index.reindex.ReindexNodeShutdownIT + method: testReindexWithShutdown + issue: https://github.com/elastic/elasticsearch/issues/118040 +- class: org.elasticsearch.packaging.test.ConfigurationTests + method: test20HostnameSubstitution + issue: https://github.com/elastic/elasticsearch/issues/118028 +- class: org.elasticsearch.packaging.test.ArchiveTests + method: test40AutoconfigurationNotTriggeredWhenNodeIsMeantToJoinExistingCluster + issue: https://github.com/elastic/elasticsearch/issues/118029 +- class: org.elasticsearch.xpack.esql.qa.multi_node.EsqlSpecIT + issue: https://github.com/elastic/elasticsearch/issues/117981 # Examples: # diff --git a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/BulkRestIT.java b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/BulkRestIT.java index 369d0824bdb2..3faa88339f0a 100644 --- a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/BulkRestIT.java +++ b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/BulkRestIT.java @@ -74,8 +74,7 @@ public void testBulkInvalidIndexNameString() throws IOException { ResponseException responseException = expectThrows(ResponseException.class, () -> getRestClient().performRequest(request)); assertThat(responseException.getResponse().getStatusLine().getStatusCode(), equalTo(BAD_REQUEST.getStatus())); - assertThat(responseException.getMessage(), containsString("could not parse bulk request body")); - assertThat(responseException.getMessage(), containsString("json_parse_exception")); + assertThat(responseException.getMessage(), containsString("x_content_parse_exception")); assertThat(responseException.getMessage(), containsString("Invalid UTF-8")); } diff --git a/server/src/main/java/org/elasticsearch/ElasticsearchException.java b/server/src/main/java/org/elasticsearch/ElasticsearchException.java index 3c5c36565420..fcb5c20c2816 100644 --- a/server/src/main/java/org/elasticsearch/ElasticsearchException.java +++ b/server/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -1947,13 +1947,13 @@ private enum ElasticsearchExceptionHandle { org.elasticsearch.ingest.IngestPipelineException.class, org.elasticsearch.ingest.IngestPipelineException::new, 182, - TransportVersions.INGEST_PIPELINE_EXCEPTION_ADDED + TransportVersions.V_8_16_0 ), INDEX_RESPONSE_WRAPPER_EXCEPTION( IndexDocFailureStoreStatus.ExceptionWithFailureStoreStatus.class, IndexDocFailureStoreStatus.ExceptionWithFailureStoreStatus::new, 183, - 
TransportVersions.FAILURE_STORE_STATUS_IN_INDEX_RESPONSE + TransportVersions.V_8_16_0 ); final Class exceptionClass; diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 2e4842912dfa..1a1219825bbb 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -104,78 +104,7 @@ static TransportVersion def(int id) { public static final TransportVersion V_8_14_0 = def(8_636_00_1); public static final TransportVersion V_8_15_0 = def(8_702_00_2); public static final TransportVersion V_8_15_2 = def(8_702_00_3); - public static final TransportVersion QUERY_RULES_LIST_INCLUDES_TYPES_BACKPORT_8_15 = def(8_702_00_4); - public static final TransportVersion ML_INFERENCE_DONT_DELETE_WHEN_SEMANTIC_TEXT_EXISTS = def(8_703_00_0); - public static final TransportVersion INFERENCE_ADAPTIVE_ALLOCATIONS = def(8_704_00_0); - public static final TransportVersion INDEX_REQUEST_UPDATE_BY_SCRIPT_ORIGIN = def(8_705_00_0); - public static final TransportVersion ML_INFERENCE_COHERE_UNUSED_RERANK_SETTINGS_REMOVED = def(8_706_00_0); - public static final TransportVersion ENRICH_CACHE_STATS_SIZE_ADDED = def(8_707_00_0); - public static final TransportVersion ENTERPRISE_GEOIP_DOWNLOADER = def(8_708_00_0); - public static final TransportVersion NODES_STATS_ENUM_SET = def(8_709_00_0); - public static final TransportVersion MASTER_NODE_METRICS = def(8_710_00_0); - public static final TransportVersion SEGMENT_LEVEL_FIELDS_STATS = def(8_711_00_0); - public static final TransportVersion ML_ADD_DETECTION_RULE_PARAMS = def(8_712_00_0); - public static final TransportVersion FIX_VECTOR_SIMILARITY_INNER_HITS = def(8_713_00_0); - public static final TransportVersion INDEX_REQUEST_UPDATE_BY_DOC_ORIGIN = def(8_714_00_0); - public static final TransportVersion ESQL_ATTRIBUTE_CACHED_SERIALIZATION = def(8_715_00_0); - public static final TransportVersion REGISTER_SLM_STATS = def(8_716_00_0); - public static final TransportVersion ESQL_NESTED_UNSUPPORTED = def(8_717_00_0); - public static final TransportVersion ESQL_SINGLE_VALUE_QUERY_SOURCE = def(8_718_00_0); - public static final TransportVersion ESQL_ORIGINAL_INDICES = def(8_719_00_0); - public static final TransportVersion ML_INFERENCE_EIS_INTEGRATION_ADDED = def(8_720_00_0); - public static final TransportVersion INGEST_PIPELINE_EXCEPTION_ADDED = def(8_721_00_0); - public static final TransportVersion ZDT_NANOS_SUPPORT_BROKEN = def(8_722_00_0); - public static final TransportVersion REMOVE_GLOBAL_RETENTION_FROM_TEMPLATES = def(8_723_00_0); - public static final TransportVersion RANDOM_RERANKER_RETRIEVER = def(8_724_00_0); - public static final TransportVersion ESQL_PROFILE_SLEEPS = def(8_725_00_0); - public static final TransportVersion ZDT_NANOS_SUPPORT = def(8_726_00_0); - public static final TransportVersion LTR_SERVERLESS_RELEASE = def(8_727_00_0); - public static final TransportVersion ALLOW_PARTIAL_SEARCH_RESULTS_IN_PIT = def(8_728_00_0); - public static final TransportVersion RANK_DOCS_RETRIEVER = def(8_729_00_0); - public static final TransportVersion ESQL_ES_FIELD_CACHED_SERIALIZATION = def(8_730_00_0); - public static final TransportVersion ADD_MANAGE_ROLES_PRIVILEGE = def(8_731_00_0); - public static final TransportVersion REPOSITORIES_TELEMETRY = def(8_732_00_0); - public static final TransportVersion ML_INFERENCE_ALIBABACLOUD_SEARCH_ADDED = def(8_733_00_0); - public static final TransportVersion 
FIELD_CAPS_RESPONSE_INDEX_MODE = def(8_734_00_0); - public static final TransportVersion GET_DATA_STREAMS_VERBOSE = def(8_735_00_0); - public static final TransportVersion ESQL_ADD_INDEX_MODE_CONCRETE_INDICES = def(8_736_00_0); - public static final TransportVersion UNASSIGNED_PRIMARY_COUNT_ON_CLUSTER_HEALTH = def(8_737_00_0); - public static final TransportVersion ESQL_AGGREGATE_EXEC_TRACKS_INTERMEDIATE_ATTRS = def(8_738_00_0); - public static final TransportVersion CCS_TELEMETRY_STATS = def(8_739_00_0); - public static final TransportVersion GLOBAL_RETENTION_TELEMETRY = def(8_740_00_0); - public static final TransportVersion ROUTING_TABLE_VERSION_REMOVED = def(8_741_00_0); - public static final TransportVersion ML_SCHEDULED_EVENT_TIME_SHIFT_CONFIGURATION = def(8_742_00_0); - public static final TransportVersion SIMULATE_COMPONENT_TEMPLATES_SUBSTITUTIONS = def(8_743_00_0); - public static final TransportVersion ML_INFERENCE_IBM_WATSONX_EMBEDDINGS_ADDED = def(8_744_00_0); - public static final TransportVersion BULK_INCREMENTAL_STATE = def(8_745_00_0); - public static final TransportVersion FAILURE_STORE_STATUS_IN_INDEX_RESPONSE = def(8_746_00_0); - public static final TransportVersion ESQL_AGGREGATION_OPERATOR_STATUS_FINISH_NANOS = def(8_747_00_0); - public static final TransportVersion ML_TELEMETRY_MEMORY_ADDED = def(8_748_00_0); - public static final TransportVersion ILM_ADD_SEARCHABLE_SNAPSHOT_TOTAL_SHARDS_PER_NODE = def(8_749_00_0); - public static final TransportVersion SEMANTIC_TEXT_SEARCH_INFERENCE_ID = def(8_750_00_0); - public static final TransportVersion ML_INFERENCE_CHUNKING_SETTINGS = def(8_751_00_0); - public static final TransportVersion SEMANTIC_QUERY_INNER_HITS = def(8_752_00_0); - public static final TransportVersion RETAIN_ILM_STEP_INFO = def(8_753_00_0); - public static final TransportVersion ADD_DATA_STREAM_OPTIONS = def(8_754_00_0); - public static final TransportVersion CCS_REMOTE_TELEMETRY_STATS = def(8_755_00_0); - public static final TransportVersion ESQL_CCS_EXECUTION_INFO = def(8_756_00_0); - public static final TransportVersion REGEX_AND_RANGE_INTERVAL_QUERIES = def(8_757_00_0); - public static final TransportVersion RRF_QUERY_REWRITE = def(8_758_00_0); - public static final TransportVersion SEARCH_FAILURE_STATS = def(8_759_00_0); - public static final TransportVersion INGEST_GEO_DATABASE_PROVIDERS = def(8_760_00_0); - public static final TransportVersion DATE_TIME_DOC_VALUES_LOCALES = def(8_761_00_0); - public static final TransportVersion FAST_REFRESH_RCO = def(8_762_00_0); - public static final TransportVersion TEXT_SIMILARITY_RERANKER_QUERY_REWRITE = def(8_763_00_0); - public static final TransportVersion SIMULATE_INDEX_TEMPLATES_SUBSTITUTIONS = def(8_764_00_0); - public static final TransportVersion RETRIEVERS_TELEMETRY_ADDED = def(8_765_00_0); - public static final TransportVersion ESQL_CACHED_STRING_SERIALIZATION = def(8_766_00_0); - public static final TransportVersion CHUNK_SENTENCE_OVERLAP_SETTING_ADDED = def(8_767_00_0); - public static final TransportVersion OPT_IN_ESQL_CCS_EXECUTION_INFO = def(8_768_00_0); - public static final TransportVersion QUERY_RULE_TEST_API = def(8_769_00_0); - public static final TransportVersion ESQL_PER_AGGREGATE_FILTER = def(8_770_00_0); - public static final TransportVersion ML_INFERENCE_ATTACH_TO_EXISTSING_DEPLOYMENT = def(8_771_00_0); - public static final TransportVersion CONVERT_FAILURE_STORE_OPTIONS_TO_SELECTOR_OPTIONS_INTERNALLY = def(8_772_00_0); - public static final TransportVersion 
INFERENCE_DONT_PERSIST_ON_READ_BACKPORT_8_16 = def(8_772_00_1);
+    public static final TransportVersion V_8_16_0 = def(8_772_00_1);
     public static final TransportVersion ADD_COMPATIBILITY_VERSIONS_TO_NODE_INFO_BACKPORT_8_16 = def(8_772_00_2);
     public static final TransportVersion SKIP_INNER_HITS_SEARCH_SOURCE_BACKPORT_8_16 = def(8_772_00_3);
     public static final TransportVersion QUERY_RULES_LIST_INCLUDES_TYPES_BACKPORT_8_16 = def(8_772_00_4);
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetAllocationStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetAllocationStatsAction.java
index e14f229f17ac..d929fb457d5d 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetAllocationStatsAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetAllocationStatsAction.java
@@ -118,7 +118,7 @@ public Request(TimeValue masterNodeTimeout, TaskId parentTaskId, EnumSet<Metric>
 
         public Request(StreamInput in) throws IOException {
             super(in);
-            this.metrics = in.getTransportVersion().onOrAfter(TransportVersions.MASTER_NODE_METRICS)
+            this.metrics = in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)
                 ? in.readEnumSet(Metric.class)
                 : EnumSet.of(Metric.ALLOCATIONS, Metric.FS);
         }
@@ -127,7 +127,7 @@ public Request(StreamInput in) throws IOException {
         public void writeTo(StreamOutput out) throws IOException {
             assert out.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0);
             super.writeTo(out);
-            if (out.getTransportVersion().onOrAfter(TransportVersions.MASTER_NODE_METRICS)) {
+            if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) {
                 out.writeEnumSet(metrics);
             }
         }
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsRequestParameters.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsRequestParameters.java
index d34bc3ec0dc2..c5e8f37ed3a9 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsRequestParameters.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsRequestParameters.java
@@ -117,7 +117,7 @@ public static Metric get(String name) {
     }
 
     public static void writeSetTo(StreamOutput out, EnumSet<Metric> metrics) throws IOException {
-        if (out.getTransportVersion().onOrAfter(TransportVersions.NODES_STATS_ENUM_SET)) {
+        if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) {
             out.writeEnumSet(metrics);
         } else {
             out.writeCollection(metrics, (output, metric) -> output.writeString(metric.metricName));
@@ -125,7 +125,7 @@ public static void writeSetTo(StreamOutput out, EnumSet<Metric> metrics) throws
     }
 
     public static EnumSet<Metric> readSetFrom(StreamInput in) throws IOException {
-        if (in.getTransportVersion().onOrAfter(TransportVersions.NODES_STATS_ENUM_SET)) {
+        if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) {
             return in.readEnumSet(Metric.class);
         } else {
             return in.readCollection((i) -> EnumSet.noneOf(Metric.class), (is, out) -> {
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java
index 9c9467db40de..b6ced0662330 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java
+++
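From here on the hunks are mechanical: feature-named transport-version constants that all fall on the 8.16 release line collapse into the released TransportVersions.V_8_16_0, so every wire gate compares against the real released version. The invariant behind each of these hunks is that the read and write sides must gate on the same version. A self-contained sketch of that pattern on a hypothetical optional field (ExampleRequest is not part of this PR; only the gating idiom is taken from it):

    import java.io.IOException;

    import org.elasticsearch.TransportVersions;
    import org.elasticsearch.common.io.stream.StreamInput;
    import org.elasticsearch.common.io.stream.StreamOutput;
    import org.elasticsearch.core.Nullable;

    // Hypothetical writeable: an optional field added in 8.16 must be gated symmetrically.
    public class ExampleRequest {
        @Nullable
        private final String uuid;

        public ExampleRequest(StreamInput in) throws IOException {
            // Readers default the field when the sender is a pre-8.16 node.
            this.uuid = in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0) ? in.readOptionalString() : null;
        }

        public void writeTo(StreamOutput out) throws IOException {
            // Writers skip the field for pre-8.16 nodes, mirroring the read side exactly.
            if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) {
                out.writeOptionalString(uuid);
            }
        }
    }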
b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java @@ -118,7 +118,7 @@ public CreateSnapshotRequest(StreamInput in) throws IOException { waitForCompletion = in.readBoolean(); partial = in.readBoolean(); userMetadata = in.readGenericMap(); - uuid = in.getTransportVersion().onOrAfter(TransportVersions.REGISTER_SLM_STATS) ? in.readOptionalString() : null; + uuid = in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0) ? in.readOptionalString() : null; } @Override @@ -136,7 +136,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(waitForCompletion); out.writeBoolean(partial); out.writeGenericMap(userMetadata); - if (out.getTransportVersion().onOrAfter(TransportVersions.REGISTER_SLM_STATS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeOptionalString(uuid); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodeResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodeResponse.java index f99baa855404..abeb73e5d8c3 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodeResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodeResponse.java @@ -44,14 +44,11 @@ public ClusterStatsNodeResponse(StreamInput in) throws IOException { } else { searchUsageStats = new SearchUsageStats(); } - if (in.getTransportVersion().onOrAfter(TransportVersions.REPOSITORIES_TELEMETRY)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { repositoryUsageStats = RepositoryUsageStats.readFrom(in); - } else { - repositoryUsageStats = RepositoryUsageStats.EMPTY; - } - if (in.getTransportVersion().onOrAfter(TransportVersions.CCS_TELEMETRY_STATS)) { ccsMetrics = new CCSTelemetrySnapshot(in); } else { + repositoryUsageStats = RepositoryUsageStats.EMPTY; ccsMetrics = new CCSTelemetrySnapshot(); } } @@ -118,12 +115,10 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_6_0)) { searchUsageStats.writeTo(out); } - if (out.getTransportVersion().onOrAfter(TransportVersions.REPOSITORIES_TELEMETRY)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { repositoryUsageStats.writeTo(out); - } // else just drop these stats, ok for bwc - if (out.getTransportVersion().onOrAfter(TransportVersions.CCS_TELEMETRY_STATS)) { ccsMetrics.writeTo(out); - } + } // else just drop these stats, ok for bwc } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/RemoteClusterStatsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/RemoteClusterStatsRequest.java index 47843a91351e..6c3c5cbb50ec 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/RemoteClusterStatsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/RemoteClusterStatsRequest.java @@ -36,9 +36,9 @@ public ActionRequestValidationException validate() { @Override public void writeTo(StreamOutput out) throws IOException { - assert out.getTransportVersion().onOrAfter(TransportVersions.CCS_REMOTE_TELEMETRY_STATS) + assert out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0) : "RemoteClusterStatsRequest is not supported by the remote cluster"; - if (out.getTransportVersion().before(TransportVersions.CCS_REMOTE_TELEMETRY_STATS)) { + if 
(out.getTransportVersion().before(TransportVersions.V_8_16_0)) { throw new UnsupportedOperationException("RemoteClusterStatsRequest is not supported by the remote cluster"); } super.writeTo(out); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/SearchUsageStats.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/SearchUsageStats.java index 0f6c56fd21bd..a6e80b5efd08 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/SearchUsageStats.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/SearchUsageStats.java @@ -22,8 +22,8 @@ import java.util.Map; import java.util.Objects; -import static org.elasticsearch.TransportVersions.RETRIEVERS_TELEMETRY_ADDED; import static org.elasticsearch.TransportVersions.V_8_12_0; +import static org.elasticsearch.TransportVersions.V_8_16_0; /** * Holds a snapshot of the search usage statistics. @@ -71,7 +71,7 @@ public SearchUsageStats(StreamInput in) throws IOException { this.sections = in.readMap(StreamInput::readLong); this.totalSearchCount = in.readVLong(); this.rescorers = in.getTransportVersion().onOrAfter(V_8_12_0) ? in.readMap(StreamInput::readLong) : Map.of(); - this.retrievers = in.getTransportVersion().onOrAfter(RETRIEVERS_TELEMETRY_ADDED) ? in.readMap(StreamInput::readLong) : Map.of(); + this.retrievers = in.getTransportVersion().onOrAfter(V_8_16_0) ? in.readMap(StreamInput::readLong) : Map.of(); } @Override @@ -83,7 +83,7 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(V_8_12_0)) { out.writeMap(rescorers, StreamOutput::writeLong); } - if (out.getTransportVersion().onOrAfter(RETRIEVERS_TELEMETRY_ADDED)) { + if (out.getTransportVersion().onOrAfter(V_8_16_0)) { out.writeMap(retrievers, StreamOutput::writeLong); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java index 97585ea9a102..2c20daa5d7af 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java @@ -12,6 +12,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.store.AlreadyClosedException; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.ActionType; @@ -72,8 +73,6 @@ import java.util.function.BooleanSupplier; import java.util.stream.Collectors; -import static org.elasticsearch.TransportVersions.CCS_REMOTE_TELEMETRY_STATS; - /** * Transport action implementing _cluster/stats API. 
*/ @@ -450,7 +449,7 @@ protected void sendItemRequest(String clusterAlias, ActionListener { - if (connection.getTransportVersion().before(CCS_REMOTE_TELEMETRY_STATS)) { + if (connection.getTransportVersion().before(TransportVersions.V_8_16_0)) { responseListener.onResponse(null); } else { remoteClusterClient.execute(connection, TransportRemoteClusterStatsAction.REMOTE_TYPE, remoteRequest, responseListener); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java index c6d990e5a1d6..f729455edcc2 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java @@ -131,8 +131,7 @@ public Response(StreamInput in) throws IOException { } else { rolloverConfiguration = null; } - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0) - && in.getTransportVersion().before(TransportVersions.REMOVE_GLOBAL_RETENTION_FROM_TEMPLATES)) { + if (in.getTransportVersion().between(TransportVersions.V_8_14_0, TransportVersions.V_8_16_0)) { in.readOptionalWriteable(DataStreamGlobalRetention::read); } } @@ -190,8 +189,7 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeOptionalWriteable(rolloverConfiguration); } - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0) - && out.getTransportVersion().before(TransportVersions.REMOVE_GLOBAL_RETENTION_FROM_TEMPLATES)) { + if (out.getTransportVersion().between(TransportVersions.V_8_14_0, TransportVersions.V_8_16_0)) { out.writeOptionalWriteable(null); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComposableIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComposableIndexTemplateAction.java index a47f89030cc6..67f87476ea6a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComposableIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComposableIndexTemplateAction.java @@ -132,8 +132,7 @@ public Response(StreamInput in) throws IOException { } else { rolloverConfiguration = null; } - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0) - && in.getTransportVersion().before(TransportVersions.REMOVE_GLOBAL_RETENTION_FROM_TEMPLATES)) { + if (in.getTransportVersion().between(TransportVersions.V_8_14_0, TransportVersions.V_8_16_0)) { in.readOptionalWriteable(DataStreamGlobalRetention::read); } } @@ -191,8 +190,7 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeOptionalWriteable(rolloverConfiguration); } - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0) - && out.getTransportVersion().before(TransportVersions.REMOVE_GLOBAL_RETENTION_FROM_TEMPLATES)) { + if (out.getTransportVersion().between(TransportVersions.V_8_14_0, TransportVersions.V_8_16_0)) { out.writeOptionalWriteable(null); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateIndexTemplateResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateIndexTemplateResponse.java index 
064c24cf4afa..80e6fbfe051a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateIndexTemplateResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateIndexTemplateResponse.java @@ -82,8 +82,7 @@ public SimulateIndexTemplateResponse(StreamInput in) throws IOException { rolloverConfiguration = in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X) ? in.readOptionalWriteable(RolloverConfiguration::new) : null; - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0) - && in.getTransportVersion().before(TransportVersions.REMOVE_GLOBAL_RETENTION_FROM_TEMPLATES)) { + if (in.getTransportVersion().between(TransportVersions.V_8_14_0, TransportVersions.V_8_16_0)) { in.readOptionalWriteable(DataStreamGlobalRetention::read); } } @@ -104,8 +103,7 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeOptionalWriteable(rolloverConfiguration); } - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0) - && out.getTransportVersion().before(TransportVersions.REMOVE_GLOBAL_RETENTION_FROM_TEMPLATES)) { + if (out.getTransportVersion().between(TransportVersions.V_8_14_0, TransportVersions.V_8_16_0)) { out.writeOptionalWriteable(null); } } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java index d5931c85bb2e..1ff970de7525 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java @@ -200,7 +200,7 @@ public Failure(StreamInput in) throws IOException { seqNo = in.readZLong(); term = in.readVLong(); aborted = in.readBoolean(); - if (in.getTransportVersion().onOrAfter(TransportVersions.FAILURE_STORE_STATUS_IN_INDEX_RESPONSE)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { failureStoreStatus = IndexDocFailureStoreStatus.read(in); } else { failureStoreStatus = IndexDocFailureStoreStatus.NOT_APPLICABLE_OR_UNKNOWN; @@ -218,7 +218,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeZLong(seqNo); out.writeVLong(term); out.writeBoolean(aborted); - if (out.getTransportVersion().onOrAfter(TransportVersions.FAILURE_STORE_STATUS_IN_INDEX_RESPONSE)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { failureStoreStatus.writeTo(out); } } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java index f62b2f48fa2f..91caebc420ff 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java @@ -98,7 +98,7 @@ public BulkRequest(StreamInput in) throws IOException { for (DocWriteRequest request : requests) { indices.add(Objects.requireNonNull(request.index(), "request index must not be null")); } - if (in.getTransportVersion().onOrAfter(TransportVersions.BULK_INCREMENTAL_STATE)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { incrementalState = new BulkRequest.IncrementalState(in); } else { incrementalState = BulkRequest.IncrementalState.EMPTY; @@ -454,7 +454,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeCollection(requests, DocWriteRequest::writeDocumentRequest); refreshPolicy.writeTo(out); 
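The template-response hunks above also rewrite two-sided gates into TransportVersion.between, which per the replaced code is the half-open range check onOrAfter(lower) && before(upper). Schematically (a fragment, with 'in' standing for a StreamInput mid-deserialization):

    // between(lo, hi) covers [lo, hi): here, the versions that still carried the
    // now-removed global-retention field. The two forms are equivalent; the hunks
    // above adopt the shorter one.
    boolean legacyLong = in.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0)
        && in.getTransportVersion().before(TransportVersions.V_8_16_0);
    boolean legacyShort = in.getTransportVersion().between(TransportVersions.V_8_14_0, TransportVersions.V_8_16_0);
    assert legacyLong == legacyShort;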
out.writeTimeValue(timeout); - if (out.getTransportVersion().onOrAfter(TransportVersions.BULK_INCREMENTAL_STATE)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { incrementalState.writeTo(out); } } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkResponse.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkResponse.java index ec7a08007de9..12d3aa67ca9b 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkResponse.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkResponse.java @@ -46,7 +46,7 @@ public BulkResponse(StreamInput in) throws IOException { responses = in.readArray(BulkItemResponse::new, BulkItemResponse[]::new); tookInMillis = in.readVLong(); ingestTookInMillis = in.readZLong(); - if (in.getTransportVersion().onOrAfter(TransportVersions.BULK_INCREMENTAL_STATE)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { incrementalState = new BulkRequest.IncrementalState(in); } else { incrementalState = BulkRequest.IncrementalState.EMPTY; @@ -151,7 +151,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeArray(responses); out.writeVLong(tookInMillis); out.writeZLong(ingestTookInMillis); - if (out.getTransportVersion().onOrAfter(TransportVersions.BULK_INCREMENTAL_STATE)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { incrementalState.writeTo(out); } } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/IndexDocFailureStoreStatus.java b/server/src/main/java/org/elasticsearch/action/bulk/IndexDocFailureStoreStatus.java index cb83d693a415..7367dfa1d53f 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/IndexDocFailureStoreStatus.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/IndexDocFailureStoreStatus.java @@ -124,7 +124,7 @@ public ExceptionWithFailureStoreStatus(BulkItemResponse.Failure failure) { public ExceptionWithFailureStoreStatus(StreamInput in) throws IOException { super(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.FAILURE_STORE_STATUS_IN_INDEX_RESPONSE)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { failureStoreStatus = IndexDocFailureStoreStatus.fromId(in.readByte()); } else { failureStoreStatus = NOT_APPLICABLE_OR_UNKNOWN; @@ -134,7 +134,7 @@ public ExceptionWithFailureStoreStatus(StreamInput in) throws IOException { @Override protected void writeTo(StreamOutput out, Writer nestedExceptionsWriter) throws IOException { super.writeTo(out, nestedExceptionsWriter); - if (out.getTransportVersion().onOrAfter(TransportVersions.FAILURE_STORE_STATUS_IN_INDEX_RESPONSE)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeByte(failureStoreStatus.getId()); } } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/SimulateBulkRequest.java b/server/src/main/java/org/elasticsearch/action/bulk/SimulateBulkRequest.java index cc7fd431d809..290d342e9dc1 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/SimulateBulkRequest.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/SimulateBulkRequest.java @@ -135,14 +135,11 @@ public SimulateBulkRequest( public SimulateBulkRequest(StreamInput in) throws IOException { super(in); this.pipelineSubstitutions = (Map>) in.readGenericValue(); - if (in.getTransportVersion().onOrAfter(TransportVersions.SIMULATE_COMPONENT_TEMPLATES_SUBSTITUTIONS)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { this.componentTemplateSubstitutions = 
(Map>) in.readGenericValue(); - } else { - componentTemplateSubstitutions = Map.of(); - } - if (in.getTransportVersion().onOrAfter(TransportVersions.SIMULATE_INDEX_TEMPLATES_SUBSTITUTIONS)) { this.indexTemplateSubstitutions = (Map>) in.readGenericValue(); } else { + componentTemplateSubstitutions = Map.of(); indexTemplateSubstitutions = Map.of(); } if (in.getTransportVersion().onOrAfter(TransportVersions.SIMULATE_MAPPING_ADDITION)) { @@ -156,10 +153,8 @@ public SimulateBulkRequest(StreamInput in) throws IOException { public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeGenericValue(pipelineSubstitutions); - if (out.getTransportVersion().onOrAfter(TransportVersions.SIMULATE_COMPONENT_TEMPLATES_SUBSTITUTIONS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeGenericValue(componentTemplateSubstitutions); - } - if (out.getTransportVersion().onOrAfter(TransportVersions.SIMULATE_INDEX_TEMPLATES_SUBSTITUTIONS)) { out.writeGenericValue(indexTemplateSubstitutions); } if (out.getTransportVersion().onOrAfter(TransportVersions.SIMULATE_MAPPING_ADDITION)) { diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java index c1cf0fa7aab4..93c40ad18cc8 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java @@ -112,7 +112,7 @@ public Request(StreamInput in) throws IOException { } else { this.includeDefaults = false; } - if (in.getTransportVersion().onOrAfter(TransportVersions.GET_DATA_STREAMS_VERBOSE)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { this.verbose = in.readBoolean(); } else { this.verbose = false; @@ -127,7 +127,7 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeBoolean(includeDefaults); } - if (out.getTransportVersion().onOrAfter(TransportVersions.GET_DATA_STREAMS_VERBOSE)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeBoolean(verbose); } } @@ -275,7 +275,7 @@ public DataStreamInfo( in.getTransportVersion().onOrAfter(TransportVersions.V_8_3_0) ? in.readOptionalWriteable(TimeSeries::new) : null, in.getTransportVersion().onOrAfter(V_8_11_X) ? in.readMap(Index::new, IndexProperties::new) : Map.of(), in.getTransportVersion().onOrAfter(V_8_11_X) ? in.readBoolean() : true, - in.getTransportVersion().onOrAfter(TransportVersions.GET_DATA_STREAMS_VERBOSE) ? in.readOptionalVLong() : null + in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0) ? 
in.readOptionalVLong() : null ); } @@ -328,7 +328,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeMap(indexSettingsValues); out.writeBoolean(templatePreferIlmValue); } - if (out.getTransportVersion().onOrAfter(TransportVersions.GET_DATA_STREAMS_VERBOSE)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeOptionalVLong(maximumTimestamp); } } diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesIndexResponse.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesIndexResponse.java index d16100a64713..6f510ad26f5e 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesIndexResponse.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesIndexResponse.java @@ -62,7 +62,7 @@ public FieldCapabilitiesIndexResponse( } else { this.indexMappingHash = null; } - if (in.getTransportVersion().onOrAfter(TransportVersions.FIELD_CAPS_RESPONSE_INDEX_MODE)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { this.indexMode = IndexMode.readFrom(in); } else { this.indexMode = IndexMode.STANDARD; @@ -77,7 +77,7 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(MAPPING_HASH_VERSION)) { out.writeOptionalString(indexMappingHash); } - if (out.getTransportVersion().onOrAfter(TransportVersions.FIELD_CAPS_RESPONSE_INDEX_MODE)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { IndexMode.writeTo(indexMode, out); } } @@ -105,7 +105,7 @@ static List readList(StreamInput input) throws I private static void collectCompressedResponses(StreamInput input, int groups, ArrayList responses) throws IOException { final CompressedGroup[] compressedGroups = new CompressedGroup[groups]; - final boolean readIndexMode = input.getTransportVersion().onOrAfter(TransportVersions.FIELD_CAPS_RESPONSE_INDEX_MODE); + final boolean readIndexMode = input.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0); for (int i = 0; i < groups; i++) { final String[] indices = input.readStringArray(); final IndexMode indexMode = readIndexMode ? 
IndexMode.readFrom(input) : IndexMode.STANDARD; @@ -179,7 +179,7 @@ private static void writeCompressedResponses(StreamOutput output, Map<String, List<FieldCapabilitiesIndexResponse>> output.writeMap(groupedResponses, (o, fieldCapabilitiesIndexResponses) -> { o.writeCollection(fieldCapabilitiesIndexResponses, (oo, r) -> oo.writeString(r.indexName)); var first = fieldCapabilitiesIndexResponses.get(0); - if (output.getTransportVersion().onOrAfter(TransportVersions.FIELD_CAPS_RESPONSE_INDEX_MODE)) { + if (output.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { IndexMode.writeTo(first.indexMode, o); } o.writeString(first.indexMappingHash); diff --git a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java index c0811e7424b0..5254c6fd06db 100644 --- a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java @@ -205,10 +205,8 @@ public IndexRequest(@Nullable ShardId shardId, StreamInput in) throws IOExceptio if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { in.readZLong(); // obsolete normalisedBytesParsed } - if (in.getTransportVersion().onOrAfter(TransportVersions.INDEX_REQUEST_UPDATE_BY_SCRIPT_ORIGIN)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { in.readBoolean(); // obsolete originatesFromUpdateByScript - } - if (in.getTransportVersion().onOrAfter(TransportVersions.INDEX_REQUEST_UPDATE_BY_DOC_ORIGIN)) { in.readBoolean(); // obsolete originatesFromUpdateByDoc } } @@ -789,10 +787,8 @@ private void writeBody(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeZLong(-1); // obsolete normalisedBytesParsed } - if (out.getTransportVersion().onOrAfter(TransportVersions.INDEX_REQUEST_UPDATE_BY_SCRIPT_ORIGIN)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeBoolean(false); // obsolete originatesFromUpdateByScript - } - if (out.getTransportVersion().onOrAfter(TransportVersions.INDEX_REQUEST_UPDATE_BY_DOC_ORIGIN)) { out.writeBoolean(false); // obsolete originatesFromUpdateByDoc } } diff --git a/server/src/main/java/org/elasticsearch/action/index/IndexResponse.java b/server/src/main/java/org/elasticsearch/action/index/IndexResponse.java index 8d1bdf227e24..7c45de890517 100644 --- a/server/src/main/java/org/elasticsearch/action/index/IndexResponse.java +++ b/server/src/main/java/org/elasticsearch/action/index/IndexResponse.java @@ -46,7 +46,7 @@ public IndexResponse(ShardId shardId, StreamInput in) throws IOException { } else { executedPipelines = null; } - if (in.getTransportVersion().onOrAfter(TransportVersions.FAILURE_STORE_STATUS_IN_INDEX_RESPONSE)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { failureStoreStatus = IndexDocFailureStoreStatus.read(in); } else { failureStoreStatus = IndexDocFailureStoreStatus.NOT_APPLICABLE_OR_UNKNOWN; @@ -60,7 +60,7 @@ public IndexResponse(StreamInput in) throws IOException { } else { executedPipelines = null; } - if (in.getTransportVersion().onOrAfter(TransportVersions.FAILURE_STORE_STATUS_IN_INDEX_RESPONSE)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { failureStoreStatus = IndexDocFailureStoreStatus.read(in); } else { failureStoreStatus = IndexDocFailureStoreStatus.NOT_APPLICABLE_OR_UNKNOWN; @@ -126,7 +126,7 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) { out.writeOptionalCollection(executedPipelines,
StreamOutput::writeString); } - if (out.getTransportVersion().onOrAfter(TransportVersions.FAILURE_STORE_STATUS_IN_INDEX_RESPONSE)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { failureStoreStatus.writeTo(out); } } @@ -137,7 +137,7 @@ public void writeThin(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) { out.writeOptionalCollection(executedPipelines, StreamOutput::writeString); } - if (out.getTransportVersion().onOrAfter(TransportVersions.FAILURE_STORE_STATUS_IN_INDEX_RESPONSE)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { failureStoreStatus.writeTo(out); } } diff --git a/server/src/main/java/org/elasticsearch/action/search/OpenPointInTimeRequest.java b/server/src/main/java/org/elasticsearch/action/search/OpenPointInTimeRequest.java index 969ba2ad983c..d68e2ce1b02b 100644 --- a/server/src/main/java/org/elasticsearch/action/search/OpenPointInTimeRequest.java +++ b/server/src/main/java/org/elasticsearch/action/search/OpenPointInTimeRequest.java @@ -63,7 +63,7 @@ public OpenPointInTimeRequest(StreamInput in) throws IOException { if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) { this.indexFilter = in.readOptionalNamedWriteable(QueryBuilder.class); } - if (in.getTransportVersion().onOrAfter(TransportVersions.ALLOW_PARTIAL_SEARCH_RESULTS_IN_PIT)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { this.allowPartialSearchResults = in.readBoolean(); } } @@ -82,7 +82,7 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) { out.writeOptionalWriteable(indexFilter); } - if (out.getTransportVersion().onOrAfter(TransportVersions.ALLOW_PARTIAL_SEARCH_RESULTS_IN_PIT)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeBoolean(allowPartialSearchResults); } else if (allowPartialSearchResults) { throw new IOException("[allow_partial_search_results] is not supported on nodes with version " + out.getTransportVersion()); diff --git a/server/src/main/java/org/elasticsearch/action/search/OpenPointInTimeResponse.java b/server/src/main/java/org/elasticsearch/action/search/OpenPointInTimeResponse.java index 3c830c8ed9dc..b3ffc564d848 100644 --- a/server/src/main/java/org/elasticsearch/action/search/OpenPointInTimeResponse.java +++ b/server/src/main/java/org/elasticsearch/action/search/OpenPointInTimeResponse.java @@ -47,7 +47,7 @@ public OpenPointInTimeResponse( @Override public void writeTo(StreamOutput out) throws IOException { out.writeBytesReference(pointInTimeId); - if (out.getTransportVersion().onOrAfter(TransportVersions.ALLOW_PARTIAL_SEARCH_RESULTS_IN_PIT)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeVInt(totalShards); out.writeVInt(successfulShards); out.writeVInt(failedShards); diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchContextId.java b/server/src/main/java/org/elasticsearch/action/search/SearchContextId.java index ca810bb88653..c2f1510341fb 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchContextId.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchContextId.java @@ -63,14 +63,14 @@ public static BytesReference encode( TransportVersion version, ShardSearchFailure[] shardFailures ) { - assert shardFailures.length == 0 || version.onOrAfter(TransportVersions.ALLOW_PARTIAL_SEARCH_RESULTS_IN_PIT) + assert shardFailures.length == 0 || 
version.onOrAfter(TransportVersions.V_8_16_0) : "[allow_partial_search_results] cannot be enabled on a cluster that has not been fully upgraded to version [" - + TransportVersions.ALLOW_PARTIAL_SEARCH_RESULTS_IN_PIT + + TransportVersions.V_8_16_0.toReleaseVersion() + "] or higher."; try (var out = new BytesStreamOutput()) { out.setTransportVersion(version); TransportVersion.writeVersion(version, out); - boolean allowNullContextId = out.getTransportVersion().onOrAfter(TransportVersions.ALLOW_PARTIAL_SEARCH_RESULTS_IN_PIT); + boolean allowNullContextId = out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0); int shardSize = searchPhaseResults.size() + (allowNullContextId ? shardFailures.length : 0); out.writeVInt(shardSize); for (var searchResult : searchPhaseResults) { diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchContextIdForNode.java b/server/src/main/java/org/elasticsearch/action/search/SearchContextIdForNode.java index 7509a7b0fed0..f91a9d09f4bb 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchContextIdForNode.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchContextIdForNode.java @@ -37,7 +37,7 @@ public final class SearchContextIdForNode implements Writeable { } SearchContextIdForNode(StreamInput in) throws IOException { - boolean allowNull = in.getTransportVersion().onOrAfter(TransportVersions.ALLOW_PARTIAL_SEARCH_RESULTS_IN_PIT); + boolean allowNull = in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0); this.node = allowNull ? in.readOptionalString() : in.readString(); this.clusterAlias = in.readOptionalString(); this.searchContextId = allowNull ? in.readOptionalWriteable(ShardSearchContextId::new) : new ShardSearchContextId(in); @@ -45,7 +45,7 @@ public final class SearchContextIdForNode implements Writeable { @Override public void writeTo(StreamOutput out) throws IOException { - boolean allowNull = out.getTransportVersion().onOrAfter(TransportVersions.ALLOW_PARTIAL_SEARCH_RESULTS_IN_PIT); + boolean allowNull = out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0); if (allowNull) { out.writeOptionalString(node); } else { @@ -53,7 +53,7 @@ public void writeTo(StreamOutput out) throws IOException { // We should never set a null node if the cluster is not fully upgraded to a version that can handle it. throw new IOException( "Cannot write null node value to a node in version " - + out.getTransportVersion() + + out.getTransportVersion().toReleaseVersion() + ". The target node must be specified to retrieve the ShardSearchContextId." ); } @@ -67,7 +67,7 @@ public void writeTo(StreamOutput out) throws IOException { // We should never set a null search context id if the cluster is not fully upgraded to a version that can handle it. throw new IOException( "Cannot write null search context ID to a node in version " - + out.getTransportVersion() + + out.getTransportVersion().toReleaseVersion() + ". A valid search context ID is required to identify the shard's search context in this version." 
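The hunks above all follow the same wire-compatibility idiom: a field introduced in 8.16 is read and written only when the peer's transport version is on or after V_8_16_0, with a fixed default (or an explicit IOException, as in OpenPointInTimeRequest#writeTo) on the old-version path. A minimal, self-contained sketch of that idiom follows; ExampleRequest, the int version ids, and the plain java.io streams are illustrative stand-ins, not Elasticsearch API:

    import java.io.DataInput;
    import java.io.DataOutput;
    import java.io.IOException;

    final class ExampleRequest {
        // Stand-in for TransportVersions.V_8_16_0 (illustrative id, not the real value).
        static final int WIRE_V_8_16_0 = 8_160_000;

        final boolean allowPartialResults; // field added to the wire format in 8.16

        ExampleRequest(boolean allowPartialResults) {
            this.allowPartialResults = allowPartialResults;
        }

        // Read side: only consume the extra boolean from peers new enough to send it.
        static ExampleRequest readFrom(DataInput in, int senderVersion) throws IOException {
            boolean allowPartial = senderVersion >= WIRE_V_8_16_0 ? in.readBoolean() : false;
            return new ExampleRequest(allowPartial);
        }

        // Write side: never emit bytes an older receiver cannot parse, and fail loudly
        // if the feature was requested but the receiver predates it.
        void writeTo(DataOutput out, int receiverVersion) throws IOException {
            if (receiverVersion >= WIRE_V_8_16_0) {
                out.writeBoolean(allowPartialResults);
            } else if (allowPartialResults) {
                throw new IOException("allow_partial_search_results is not supported on version " + receiverVersion);
            }
        }
    }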
); } diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java index 9e60eedbad6a..36ca0fba9437 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java @@ -104,8 +104,7 @@ public TransportOpenPointInTimeAction( protected void doExecute(Task task, OpenPointInTimeRequest request, ActionListener<OpenPointInTimeResponse> listener) { final ClusterState clusterState = clusterService.state(); // Check if all the nodes in this cluster know about the service - if (request.allowPartialSearchResults() - && clusterState.getMinTransportVersion().before(TransportVersions.ALLOW_PARTIAL_SEARCH_RESULTS_IN_PIT)) { + if (request.allowPartialSearchResults() && clusterState.getMinTransportVersion().before(TransportVersions.V_8_16_0)) { listener.onFailure( new ElasticsearchStatusException( format( diff --git a/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java b/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java index 85889d8398cb..ebbd47336e3d 100644 --- a/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java +++ b/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java @@ -982,12 +982,11 @@ public void writeIndicesOptions(StreamOutput out) throws IOException { states.add(WildcardStates.HIDDEN); } out.writeEnumSet(states); - if (out.getTransportVersion() - .between(TransportVersions.V_8_14_0, TransportVersions.CONVERT_FAILURE_STORE_OPTIONS_TO_SELECTOR_OPTIONS_INTERNALLY)) { + if (out.getTransportVersion().between(TransportVersions.V_8_14_0, TransportVersions.V_8_16_0)) { out.writeBoolean(includeRegularIndices()); out.writeBoolean(includeFailureIndices()); } - if (out.getTransportVersion().onOrAfter(TransportVersions.CONVERT_FAILURE_STORE_OPTIONS_TO_SELECTOR_OPTIONS_INTERNALLY)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { selectorOptions.writeTo(out); } } @@ -1010,8 +1009,7 @@ public static IndicesOptions readIndicesOptions(StreamInput in) throws IOExcepti .ignoreThrottled(options.contains(Option.IGNORE_THROTTLED)) .build(); SelectorOptions selectorOptions = SelectorOptions.DEFAULT; - if (in.getTransportVersion() - .between(TransportVersions.V_8_14_0, TransportVersions.CONVERT_FAILURE_STORE_OPTIONS_TO_SELECTOR_OPTIONS_INTERNALLY)) { + if (in.getTransportVersion().between(TransportVersions.V_8_14_0, TransportVersions.V_8_16_0)) { // Reading from an older node, which will be sending two booleans that we must read out and ignore.
var includeData = in.readBoolean(); var includeFailures = in.readBoolean(); @@ -1023,7 +1021,7 @@ public static IndicesOptions readIndicesOptions(StreamInput in) throws IOExcepti selectorOptions = SelectorOptions.FAILURES; } } - if (in.getTransportVersion().onOrAfter(TransportVersions.CONVERT_FAILURE_STORE_OPTIONS_TO_SELECTOR_OPTIONS_INTERNALLY)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { selectorOptions = SelectorOptions.read(in); } return new IndicesOptions( diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java index c06ea9305aef..27cbb39c05d3 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java @@ -42,9 +42,7 @@ import org.elasticsearch.nativeaccess.NativeAccess; import org.elasticsearch.node.Node; import org.elasticsearch.node.NodeValidationException; -import org.elasticsearch.plugins.PluginBundle; import org.elasticsearch.plugins.PluginsLoader; -import org.elasticsearch.plugins.PluginsUtils; import java.io.IOException; import java.io.InputStream; @@ -54,10 +52,8 @@ import java.nio.file.Path; import java.security.Permission; import java.security.Security; -import java.util.ArrayList; import java.util.List; import java.util.Objects; -import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; @@ -208,21 +204,17 @@ private static void initPhase2(Bootstrap bootstrap) throws IOException { // load the plugin Java modules and layers now for use in entitlements var pluginsLoader = PluginsLoader.createPluginsLoader(nodeEnv.modulesFile(), nodeEnv.pluginsFile()); bootstrap.setPluginsLoader(pluginsLoader); + var pluginsResolver = PluginsResolver.create(pluginsLoader); if (Boolean.parseBoolean(System.getProperty("es.entitlements.enabled"))) { LogManager.getLogger(Elasticsearch.class).info("Bootstrapping Entitlements"); - List<Tuple<Path, Boolean>> pluginData = new ArrayList<>(); - Set<PluginBundle> moduleBundles = PluginsUtils.getModuleBundles(nodeEnv.modulesFile()); - for (PluginBundle moduleBundle : moduleBundles) { - pluginData.add(Tuple.tuple(moduleBundle.getDir(), moduleBundle.pluginDescriptor().isModular())); - } - Set<PluginBundle> pluginBundles = PluginsUtils.getPluginBundles(nodeEnv.pluginsFile()); - for (PluginBundle pluginBundle : pluginBundles) { - pluginData.add(Tuple.tuple(pluginBundle.getDir(), pluginBundle.pluginDescriptor().isModular())); - } - // TODO: add a functor to map module to plugin name - EntitlementBootstrap.bootstrap(pluginData, callerClass -> null); + List<Tuple<Path, Boolean>> pluginData = pluginsLoader.allBundles() + .stream() + .map(bundle -> Tuple.tuple(bundle.getDir(), bundle.pluginDescriptor().isModular())) + .toList(); + + EntitlementBootstrap.bootstrap(pluginData, pluginsResolver::resolveClassToPluginName); } else { // install SM after natives, shutdown hooks, etc. LogManager.getLogger(Elasticsearch.class).info("Bootstrapping java SecurityManager"); diff --git a/server/src/main/java/org/elasticsearch/bootstrap/PluginsResolver.java b/server/src/main/java/org/elasticsearch/bootstrap/PluginsResolver.java new file mode 100644 index 000000000000..256e91cbee16 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/bootstrap/PluginsResolver.java @@ -0,0 +1,47 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements.
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.bootstrap; + +import org.elasticsearch.plugins.PluginsLoader; + +import java.util.HashMap; +import java.util.Map; + +class PluginsResolver { + private final Map<Module, String> pluginNameByModule; + + private PluginsResolver(Map<Module, String> pluginNameByModule) { + this.pluginNameByModule = pluginNameByModule; + } + + public static PluginsResolver create(PluginsLoader pluginsLoader) { + Map<Module, String> pluginNameByModule = new HashMap<>(); + + pluginsLoader.pluginLayers().forEach(pluginLayer -> { + var pluginName = pluginLayer.pluginBundle().pluginDescriptor().getName(); + if (pluginLayer.pluginModuleLayer() != null && pluginLayer.pluginModuleLayer() != ModuleLayer.boot()) { + // This plugin is a Java Module + for (var module : pluginLayer.pluginModuleLayer().modules()) { + pluginNameByModule.put(module, pluginName); + } + } else { + // This plugin is not modularized + pluginNameByModule.put(pluginLayer.pluginClassLoader().getUnnamedModule(), pluginName); + } + }); + + return new PluginsResolver(pluginNameByModule); + } + + public String resolveClassToPluginName(Class<?> clazz) { + var module = clazz.getModule(); + return pluginNameByModule.get(module); + } +} diff --git a/server/src/main/java/org/elasticsearch/cluster/health/ClusterIndexHealth.java b/server/src/main/java/org/elasticsearch/cluster/health/ClusterIndexHealth.java index b6c1defe91a7..9cf567c21966 100644 --- a/server/src/main/java/org/elasticsearch/cluster/health/ClusterIndexHealth.java +++ b/server/src/main/java/org/elasticsearch/cluster/health/ClusterIndexHealth.java @@ -111,7 +111,7 @@ public ClusterIndexHealth(final StreamInput in) throws IOException { unassignedShards = in.readVInt(); status = ClusterHealthStatus.readFrom(in); shards = in.readMapValues(ClusterShardHealth::new, ClusterShardHealth::getShardId); - if (in.getTransportVersion().onOrAfter(TransportVersions.UNASSIGNED_PRIMARY_COUNT_ON_CLUSTER_HEALTH)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { unassignedPrimaryShards = in.readVInt(); } else { unassignedPrimaryShards = 0; @@ -203,7 +203,7 @@ public void writeTo(final StreamOutput out) throws IOException { out.writeVInt(unassignedShards); out.writeByte(status.value()); out.writeMapValues(shards); - if (out.getTransportVersion().onOrAfter(TransportVersions.UNASSIGNED_PRIMARY_COUNT_ON_CLUSTER_HEALTH)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeVInt(unassignedPrimaryShards); } } diff --git a/server/src/main/java/org/elasticsearch/cluster/health/ClusterShardHealth.java b/server/src/main/java/org/elasticsearch/cluster/health/ClusterShardHealth.java index 63863542564c..f512acb6e04d 100644 --- a/server/src/main/java/org/elasticsearch/cluster/health/ClusterShardHealth.java +++ b/server/src/main/java/org/elasticsearch/cluster/health/ClusterShardHealth.java @@ -96,7 +96,7 @@ public ClusterShardHealth(final StreamInput in) throws IOException { initializingShards = in.readVInt(); unassignedShards = in.readVInt(); primaryActive = in.readBoolean(); - if (in.getTransportVersion().onOrAfter(TransportVersions.UNASSIGNED_PRIMARY_COUNT_ON_CLUSTER_HEALTH)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) {
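PluginsResolver above relies on two JPMS facts: every class reports a java.lang.Module via Class#getModule, and a plugin's classes live either in that plugin's module layer or in the unnamed module of its class loader. A simplified, hand-wired sketch of the same lookup — the PluginsLoader plumbing is elided and all names here are illustrative, not Elasticsearch API:

    import java.util.HashMap;
    import java.util.Map;

    final class ModuleToPluginLookup {
        private final Map<Module, String> pluginNameByModule = new HashMap<>();

        // For a modular plugin, register every module of its layer; for a
        // non-modular plugin, register the unnamed module of its class loader.
        void register(Module module, String pluginName) {
            pluginNameByModule.put(module, pluginName);
        }

        // Mirrors resolveClassToPluginName: any class resolves through its module;
        // classes that belong to no plugin simply return null.
        String resolve(Class<?> clazz) {
            return pluginNameByModule.get(clazz.getModule());
        }
    }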
unassignedPrimaryShards = in.readVInt(); } else { unassignedPrimaryShards = 0; @@ -167,7 +167,7 @@ public void writeTo(final StreamOutput out) throws IOException { out.writeVInt(initializingShards); out.writeVInt(unassignedShards); out.writeBoolean(primaryActive); - if (out.getTransportVersion().onOrAfter(TransportVersions.UNASSIGNED_PRIMARY_COUNT_ON_CLUSTER_HEALTH)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeVInt(unassignedPrimaryShards); } } diff --git a/server/src/main/java/org/elasticsearch/cluster/health/ClusterStateHealth.java b/server/src/main/java/org/elasticsearch/cluster/health/ClusterStateHealth.java index 579429b5d51d..31f275e29c36 100644 --- a/server/src/main/java/org/elasticsearch/cluster/health/ClusterStateHealth.java +++ b/server/src/main/java/org/elasticsearch/cluster/health/ClusterStateHealth.java @@ -120,7 +120,7 @@ public ClusterStateHealth(final StreamInput in) throws IOException { status = ClusterHealthStatus.readFrom(in); indices = in.readMapValues(ClusterIndexHealth::new, ClusterIndexHealth::getIndex); activeShardsPercent = in.readDouble(); - if (in.getTransportVersion().onOrAfter(TransportVersions.UNASSIGNED_PRIMARY_COUNT_ON_CLUSTER_HEALTH)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { unassignedPrimaryShards = in.readVInt(); } else { unassignedPrimaryShards = 0; @@ -212,7 +212,7 @@ public void writeTo(final StreamOutput out) throws IOException { out.writeByte(status.value()); out.writeMapValues(indices); out.writeDouble(activeShardsPercent); - if (out.getTransportVersion().onOrAfter(TransportVersions.UNASSIGNED_PRIMARY_COUNT_ON_CLUSTER_HEALTH)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeVInt(unassignedPrimaryShards); } } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java index 4dcc7c73c280..979434950cf7 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java @@ -71,6 +71,7 @@ public final class DataStream implements SimpleDiffable<DataStream>, ToXContentO public static final FeatureFlag FAILURE_STORE_FEATURE_FLAG = new FeatureFlag("failure_store"); public static final TransportVersion ADDED_FAILURE_STORE_TRANSPORT_VERSION = TransportVersions.V_8_12_0; public static final TransportVersion ADDED_AUTO_SHARDING_EVENT_VERSION = TransportVersions.V_8_14_0; + public static final TransportVersion ADD_DATA_STREAM_OPTIONS_VERSION = TransportVersions.V_8_16_0; public static boolean isFailureStoreFeatureFlagEnabled() { return FAILURE_STORE_FEATURE_FLAG.isEnabled(); @@ -200,9 +201,7 @@ public static DataStream read(StreamInput in) throws IOException { : null; // This boolean flag has been moved in data stream options var failureStoreEnabled = in.getTransportVersion() - .between(DataStream.ADDED_FAILURE_STORE_TRANSPORT_VERSION, TransportVersions.ADD_DATA_STREAM_OPTIONS) - ? in.readBoolean() - : false; + .between(DataStream.ADDED_FAILURE_STORE_TRANSPORT_VERSION, TransportVersions.V_8_16_0) ? in.readBoolean() : false; var failureIndices = in.getTransportVersion().onOrAfter(DataStream.ADDED_FAILURE_STORE_TRANSPORT_VERSION) ?
readIndices(in) : List.of(); @@ -216,7 +215,7 @@ public static DataStream read(StreamInput in) throws IOException { .setAutoShardingEvent(in.readOptionalWriteable(DataStreamAutoShardingEvent::new)); } DataStreamOptions dataStreamOptions; - if (in.getTransportVersion().onOrAfter(TransportVersions.ADD_DATA_STREAM_OPTIONS)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { dataStreamOptions = in.readOptionalWriteable(DataStreamOptions::read); } else { // We cannot distinguish if failure store was explicitly disabled or not. Given that failure store @@ -1077,7 +1076,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalWriteable(lifecycle); } if (out.getTransportVersion() - .between(DataStream.ADDED_FAILURE_STORE_TRANSPORT_VERSION, TransportVersions.ADD_DATA_STREAM_OPTIONS)) { + .between(DataStream.ADDED_FAILURE_STORE_TRANSPORT_VERSION, DataStream.ADD_DATA_STREAM_OPTIONS_VERSION)) { out.writeBoolean(isFailureStoreEnabled()); } if (out.getTransportVersion().onOrAfter(DataStream.ADDED_FAILURE_STORE_TRANSPORT_VERSION)) { @@ -1093,7 +1092,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(failureIndices.rolloverOnWrite); out.writeOptionalWriteable(failureIndices.autoShardingEvent); } - if (out.getTransportVersion().onOrAfter(TransportVersions.ADD_DATA_STREAM_OPTIONS)) { + if (out.getTransportVersion().onOrAfter(DataStream.ADD_DATA_STREAM_OPTIONS_VERSION)) { out.writeOptionalWriteable(dataStreamOptions.isEmpty() ? null : dataStreamOptions); } } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/InferenceFieldMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/InferenceFieldMetadata.java index 271c60e829a8..8917d5a9cbbb 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/InferenceFieldMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/InferenceFieldMetadata.java @@ -9,6 +9,7 @@ package org.elasticsearch.cluster.metadata; +import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.Diff; import org.elasticsearch.cluster.SimpleDiffable; import org.elasticsearch.common.io.stream.StreamInput; @@ -23,8 +24,6 @@ import java.util.List; import java.util.Objects; -import static org.elasticsearch.TransportVersions.SEMANTIC_TEXT_SEARCH_INFERENCE_ID; - /** * Contains inference field data for fields. 
* As inference is done in the coordinator node to avoid re-doing it at shard / replica level, the coordinator needs to check for the need @@ -56,7 +55,7 @@ public InferenceFieldMetadata(String name, String inferenceId, String searchInfe public InferenceFieldMetadata(StreamInput input) throws IOException { this.name = input.readString(); this.inferenceId = input.readString(); - if (input.getTransportVersion().onOrAfter(SEMANTIC_TEXT_SEARCH_INFERENCE_ID)) { + if (input.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { this.searchInferenceId = input.readString(); } else { this.searchInferenceId = this.inferenceId; @@ -68,7 +67,7 @@ public InferenceFieldMetadata(StreamInput input) throws IOException { public void writeTo(StreamOutput out) throws IOException { out.writeString(name); out.writeString(inferenceId); - if (out.getTransportVersion().onOrAfter(SEMANTIC_TEXT_SEARCH_INFERENCE_ID)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeString(searchInferenceId); } out.writeStringArray(sourceFields); diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java b/server/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java index 790b8e4ab75f..60cf6b10417f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java @@ -317,7 +317,7 @@ public static Diff<RoutingTable> readDiffFrom(StreamInput in) throws IOException public static RoutingTable readFrom(StreamInput in) throws IOException { Builder builder = new Builder(); - if (in.getTransportVersion().before(TransportVersions.ROUTING_TABLE_VERSION_REMOVED)) { + if (in.getTransportVersion().before(TransportVersions.V_8_16_0)) { in.readLong(); // previously 'version', unused in all applicable versions so any number will do } int size = in.readVInt(); @@ -331,7 +331,7 @@ public static RoutingTable readFrom(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().before(TransportVersions.ROUTING_TABLE_VERSION_REMOVED)) { + if (out.getTransportVersion().before(TransportVersions.V_8_16_0)) { out.writeLong(0); // previously 'version', unused in all applicable versions so any number will do } out.writeCollection(indicesRouting.values()); @@ -349,7 +349,7 @@ private static class RoutingTableDiff implements Diff<RoutingTable> { new DiffableUtils.DiffableValueReader<>(IndexRoutingTable::readFrom, IndexRoutingTable::readDiffFrom); RoutingTableDiff(StreamInput in) throws IOException { - if (in.getTransportVersion().before(TransportVersions.ROUTING_TABLE_VERSION_REMOVED)) { + if (in.getTransportVersion().before(TransportVersions.V_8_16_0)) { in.readLong(); // previously 'version', unused in all applicable versions so any number will do } indicesRouting = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(), DIFF_VALUE_READER); @@ -366,7 +366,7 @@ public RoutingTable apply(RoutingTable part) { @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().before(TransportVersions.ROUTING_TABLE_VERSION_REMOVED)) { + if (out.getTransportVersion().before(TransportVersions.V_8_16_0)) { out.writeLong(0); // previously 'version', unused in all applicable versions so any number will do } indicesRouting.writeTo(out); diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java
b/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java index 644cc6bb6992..e07861ba0543 100644 --- a/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java +++ b/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java @@ -908,11 +908,8 @@ public final Instant readOptionalInstant() throws IOException { private ZonedDateTime readZonedDateTime() throws IOException { final String timeZoneId = readString(); final Instant instant; - if (getTransportVersion().onOrAfter(TransportVersions.ZDT_NANOS_SUPPORT_BROKEN)) { - // epoch seconds can be negative, but it was incorrectly first written as vlong - boolean zlong = getTransportVersion().onOrAfter(TransportVersions.ZDT_NANOS_SUPPORT); - long seconds = zlong ? readZLong() : readVLong(); - instant = Instant.ofEpochSecond(seconds, readInt()); + if (getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { + instant = Instant.ofEpochSecond(readZLong(), readInt()); } else { instant = Instant.ofEpochMilli(readLong()); } diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java b/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java index d724e5ea25ca..6738af32f04d 100644 --- a/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java +++ b/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java @@ -768,13 +768,8 @@ public final void writeOptionalInstant(@Nullable Instant instant) throws IOExcep final ZonedDateTime zonedDateTime = (ZonedDateTime) v; o.writeString(zonedDateTime.getZone().getId()); Instant instant = zonedDateTime.toInstant(); - if (o.getTransportVersion().onOrAfter(TransportVersions.ZDT_NANOS_SUPPORT_BROKEN)) { - // epoch seconds can be negative, but it was incorrectly first written as vlong - if (o.getTransportVersion().onOrAfter(TransportVersions.ZDT_NANOS_SUPPORT)) { - o.writeZLong(instant.getEpochSecond()); - } else { - o.writeVLong(instant.getEpochSecond()); - } + if (o.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { + o.writeZLong(instant.getEpochSecond()); o.writeInt(instant.getNano()); } else { o.writeLong(instant.toEpochMilli()); diff --git a/server/src/main/java/org/elasticsearch/index/engine/CommitStats.java b/server/src/main/java/org/elasticsearch/index/engine/CommitStats.java index a871524b45e9..520174a4b363 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/CommitStats.java +++ b/server/src/main/java/org/elasticsearch/index/engine/CommitStats.java @@ -46,7 +46,7 @@ public CommitStats(SegmentInfos segmentInfos) { generation = in.readLong(); id = in.readOptionalString(); numDocs = in.readInt(); - numLeaves = in.getTransportVersion().onOrAfter(TransportVersions.SEGMENT_LEVEL_FIELDS_STATS) ? in.readVInt() : 0; + numLeaves = in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0) ? 
in.readVInt() : 0; } @Override @@ -100,7 +100,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeLong(generation); out.writeOptionalString(id); out.writeInt(numDocs); - if (out.getTransportVersion().onOrAfter(TransportVersions.SEGMENT_LEVEL_FIELDS_STATS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeVInt(numLeaves); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index e00e7b232000..9ddb6f0d496a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -946,7 +946,9 @@ public Query termQuery(Object value, SearchExecutionContext context) { protected void parseCreateField(DocumentParserContext context) { // Run-time fields are mapped to this mapper, so it needs to handle storing values for use in synthetic source. // #parseValue calls this method once the run-time field is created. - if (context.dynamic() == ObjectMapper.Dynamic.RUNTIME && context.canAddIgnoredField()) { + var fieldType = context.mappingLookup().getFieldType(path); + boolean isRuntimeField = fieldType instanceof AbstractScriptFieldType; + if ((context.dynamic() == ObjectMapper.Dynamic.RUNTIME || isRuntimeField) && context.canAddIgnoredField()) { try { context.addIgnoredField( IgnoredSourceFieldMapper.NameValue.fromContext(context, path, context.encodeFlattenedToken()) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NodeMappingStats.java b/server/src/main/java/org/elasticsearch/index/mapper/NodeMappingStats.java index 56210a292995..10b085654039 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NodeMappingStats.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NodeMappingStats.java @@ -52,7 +52,7 @@ public NodeMappingStats() { public NodeMappingStats(StreamInput in) throws IOException { totalCount = in.readVLong(); totalEstimatedOverhead = in.readVLong(); - if (in.getTransportVersion().onOrAfter(TransportVersions.SEGMENT_LEVEL_FIELDS_STATS)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { totalSegments = in.readVLong(); totalSegmentFields = in.readVLong(); } @@ -93,7 +93,7 @@ public long getTotalSegmentFields() { public void writeTo(StreamOutput out) throws IOException { out.writeVLong(totalCount); out.writeVLong(totalEstimatedOverhead); - if (out.getTransportVersion().onOrAfter(TransportVersions.SEGMENT_LEVEL_FIELDS_STATS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeVLong(totalSegments); out.writeVLong(totalSegmentFields); } diff --git a/server/src/main/java/org/elasticsearch/index/query/IntervalsSourceProvider.java b/server/src/main/java/org/elasticsearch/index/query/IntervalsSourceProvider.java index 647e45d1beda..6ae0c4872cfa 100644 --- a/server/src/main/java/org/elasticsearch/index/query/IntervalsSourceProvider.java +++ b/server/src/main/java/org/elasticsearch/index/query/IntervalsSourceProvider.java @@ -825,7 +825,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.REGEX_AND_RANGE_INTERVAL_QUERIES; + return TransportVersions.V_8_16_0; } @Override @@ -1129,7 +1129,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.REGEX_AND_RANGE_INTERVAL_QUERIES; + return 
TransportVersions.V_8_16_0; } @Override diff --git a/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java index 83bca7d27aee..503b2adf756f 100644 --- a/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java @@ -321,8 +321,7 @@ public static Query toQuery( // ToParentBlockJoinQuery requires that the inner query only matches documents // in its child space - NestedHelper nestedHelper = new NestedHelper(context.nestedLookup(), context::isFieldMapped); - if (nestedHelper.mightMatchNonNestedDocs(innerQuery, path)) { + if (NestedHelper.mightMatchNonNestedDocs(innerQuery, path, context)) { innerQuery = Queries.filtered(innerQuery, mapper.nestedTypeFilter()); } diff --git a/server/src/main/java/org/elasticsearch/index/query/RankDocsQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/RankDocsQueryBuilder.java index 33077697a2ce..889fa40b79aa 100644 --- a/server/src/main/java/org/elasticsearch/index/query/RankDocsQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/RankDocsQueryBuilder.java @@ -25,8 +25,6 @@ import java.util.Map; import java.util.Objects; -import static org.elasticsearch.TransportVersions.RRF_QUERY_REWRITE; public class RankDocsQueryBuilder extends AbstractQueryBuilder<RankDocsQueryBuilder> { public static final String NAME = "rank_docs_query"; @@ -44,7 +42,7 @@ public RankDocsQueryBuilder(RankDoc[] rankDocs, QueryBuilder[] queryBuilders, bo public RankDocsQueryBuilder(StreamInput in) throws IOException { super(in); this.rankDocs = in.readArray(c -> c.readNamedWriteable(RankDoc.class), RankDoc[]::new); - if (in.getTransportVersion().onOrAfter(RRF_QUERY_REWRITE)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { this.queryBuilders = in.readOptionalArray(c -> c.readNamedWriteable(QueryBuilder.class), QueryBuilder[]::new); this.onlyRankDocs = in.readBoolean(); } else { @@ -85,7 +83,7 @@ public RankDoc[] rankDocs() { @Override protected void doWriteTo(StreamOutput out) throws IOException { out.writeArray(StreamOutput::writeNamedWriteable, rankDocs); - if (out.getTransportVersion().onOrAfter(RRF_QUERY_REWRITE)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeOptionalArray(StreamOutput::writeNamedWriteable, queryBuilders); out.writeBoolean(onlyRankDocs); } @@ -145,6 +143,6 @@ protected int doHashCode() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.RANK_DOCS_RETRIEVER; + return TransportVersions.V_8_16_0; } } diff --git a/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java b/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java index b07112440d3c..d5e48a6a54da 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java @@ -493,14 +493,18 @@ public boolean containsBrokenAnalysis(String field) { */ public SearchLookup lookup() { if (this.lookup == null) { - SourceProvider sourceProvider = isSourceSynthetic() - ?
SourceProvider.fromSyntheticSource(mappingLookup.getMapping(), mapperMetrics.sourceFieldMetrics()) - : SourceProvider.fromStoredFields(); + var sourceProvider = createSourceProvider(); setLookupProviders(sourceProvider, LeafFieldLookupProvider.fromStoredFields()); } return this.lookup; } + public SourceProvider createSourceProvider() { + return isSourceSynthetic() + ? SourceProvider.fromSyntheticSource(mappingLookup.getMapping(), mapperMetrics.sourceFieldMetrics()) + : SourceProvider.fromStoredFields(); + } + /** * Replace the standard source provider and field lookup provider on the SearchLookup * diff --git a/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java b/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java index 96e8ac35c8e3..a04f930e052b 100644 --- a/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java +++ b/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java @@ -21,29 +21,21 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.search.TermQuery; -import org.elasticsearch.index.mapper.NestedLookup; import org.elasticsearch.index.mapper.NestedObjectMapper; - -import java.util.function.Predicate; +import org.elasticsearch.index.query.SearchExecutionContext; /** Utility class to filter parent and children clauses when building nested * queries. */ public final class NestedHelper { - private final NestedLookup nestedLookup; - private final Predicate<String> isMappedFieldPredicate; - - public NestedHelper(NestedLookup nestedLookup, Predicate<String> isMappedFieldPredicate) { - this.nestedLookup = nestedLookup; - this.isMappedFieldPredicate = isMappedFieldPredicate; - } + private NestedHelper() {} /** Returns true if the given query might match nested documents.
*/ - public boolean mightMatchNestedDocs(Query query) { + public static boolean mightMatchNestedDocs(Query query, SearchExecutionContext searchExecutionContext) { if (query instanceof ConstantScoreQuery) { - return mightMatchNestedDocs(((ConstantScoreQuery) query).getQuery()); + return mightMatchNestedDocs(((ConstantScoreQuery) query).getQuery(), searchExecutionContext); } else if (query instanceof BoostQuery) { - return mightMatchNestedDocs(((BoostQuery) query).getQuery()); + return mightMatchNestedDocs(((BoostQuery) query).getQuery(), searchExecutionContext); } else if (query instanceof MatchAllDocsQuery) { return true; } else if (query instanceof MatchNoDocsQuery) { @@ -51,17 +43,17 @@ public boolean mightMatchNestedDocs(Query query) { } else if (query instanceof TermQuery) { // We only handle term(s) queries and range queries, which should already // cover a high majority of use-cases - return mightMatchNestedDocs(((TermQuery) query).getTerm().field()); + return mightMatchNestedDocs(((TermQuery) query).getTerm().field(), searchExecutionContext); } else if (query instanceof TermInSetQuery tis) { if (tis.getTermsCount() > 0) { - return mightMatchNestedDocs(tis.getField()); + return mightMatchNestedDocs(tis.getField(), searchExecutionContext); } else { return false; } } else if (query instanceof PointRangeQuery) { - return mightMatchNestedDocs(((PointRangeQuery) query).getField()); + return mightMatchNestedDocs(((PointRangeQuery) query).getField(), searchExecutionContext); } else if (query instanceof IndexOrDocValuesQuery) { - return mightMatchNestedDocs(((IndexOrDocValuesQuery) query).getIndexQuery()); + return mightMatchNestedDocs(((IndexOrDocValuesQuery) query).getIndexQuery(), searchExecutionContext); } else if (query instanceof final BooleanQuery bq) { final boolean hasRequiredClauses = bq.clauses().stream().anyMatch(BooleanClause::isRequired); if (hasRequiredClauses) { @@ -69,13 +61,13 @@ public boolean mightMatchNestedDocs(Query query) { .stream() .filter(BooleanClause::isRequired) .map(BooleanClause::query) - .allMatch(this::mightMatchNestedDocs); + .allMatch(f -> mightMatchNestedDocs(f, searchExecutionContext)); } else { return bq.clauses() .stream() .filter(c -> c.occur() == Occur.SHOULD) .map(BooleanClause::query) - .anyMatch(this::mightMatchNestedDocs); + .anyMatch(f -> mightMatchNestedDocs(f, searchExecutionContext)); } } else if (query instanceof ESToParentBlockJoinQuery) { return ((ESToParentBlockJoinQuery) query).getPath() != null; @@ -85,7 +77,7 @@ public boolean mightMatchNestedDocs(Query query) { } /** Returns true if a query on the given field might match nested documents. */ - boolean mightMatchNestedDocs(String field) { + private static boolean mightMatchNestedDocs(String field, SearchExecutionContext searchExecutionContext) { if (field.startsWith("_")) { // meta field. Every meta field behaves differently, eg. nested // documents have the same _uid as their parent, put their path in @@ -94,36 +86,36 @@ boolean mightMatchNestedDocs(String field) { // we might add a nested filter when it is nor required. return true; } - if (isMappedFieldPredicate.test(field) == false) { + if (searchExecutionContext.isFieldMapped(field) == false) { // field does not exist return false; } - return nestedLookup.getNestedParent(field) != null; + return searchExecutionContext.nestedLookup().getNestedParent(field) != null; } /** Returns true if the given query might match parent documents or documents * that are nested under a different path. 
*/ - public boolean mightMatchNonNestedDocs(Query query, String nestedPath) { + public static boolean mightMatchNonNestedDocs(Query query, String nestedPath, SearchExecutionContext searchExecutionContext) { if (query instanceof ConstantScoreQuery) { - return mightMatchNonNestedDocs(((ConstantScoreQuery) query).getQuery(), nestedPath); + return mightMatchNonNestedDocs(((ConstantScoreQuery) query).getQuery(), nestedPath, searchExecutionContext); } else if (query instanceof BoostQuery) { - return mightMatchNonNestedDocs(((BoostQuery) query).getQuery(), nestedPath); + return mightMatchNonNestedDocs(((BoostQuery) query).getQuery(), nestedPath, searchExecutionContext); } else if (query instanceof MatchAllDocsQuery) { return true; } else if (query instanceof MatchNoDocsQuery) { return false; } else if (query instanceof TermQuery) { - return mightMatchNonNestedDocs(((TermQuery) query).getTerm().field(), nestedPath); + return mightMatchNonNestedDocs(searchExecutionContext, ((TermQuery) query).getTerm().field(), nestedPath); } else if (query instanceof TermInSetQuery tis) { if (tis.getTermsCount() > 0) { - return mightMatchNonNestedDocs(tis.getField(), nestedPath); + return mightMatchNonNestedDocs(searchExecutionContext, tis.getField(), nestedPath); } else { return false; } } else if (query instanceof PointRangeQuery) { - return mightMatchNonNestedDocs(((PointRangeQuery) query).getField(), nestedPath); + return mightMatchNonNestedDocs(searchExecutionContext, ((PointRangeQuery) query).getField(), nestedPath); } else if (query instanceof IndexOrDocValuesQuery) { - return mightMatchNonNestedDocs(((IndexOrDocValuesQuery) query).getIndexQuery(), nestedPath); + return mightMatchNonNestedDocs(((IndexOrDocValuesQuery) query).getIndexQuery(), nestedPath, searchExecutionContext); } else if (query instanceof final BooleanQuery bq) { final boolean hasRequiredClauses = bq.clauses().stream().anyMatch(BooleanClause::isRequired); if (hasRequiredClauses) { @@ -131,13 +123,13 @@ public boolean mightMatchNonNestedDocs(Query query, String nestedPath) { .stream() .filter(BooleanClause::isRequired) .map(BooleanClause::query) - .allMatch(q -> mightMatchNonNestedDocs(q, nestedPath)); + .allMatch(q -> mightMatchNonNestedDocs(q, nestedPath, searchExecutionContext)); } else { return bq.clauses() .stream() .filter(c -> c.occur() == Occur.SHOULD) .map(BooleanClause::query) - .anyMatch(q -> mightMatchNonNestedDocs(q, nestedPath)); + .anyMatch(q -> mightMatchNonNestedDocs(q, nestedPath, searchExecutionContext)); } } else { return true; @@ -146,7 +138,7 @@ public boolean mightMatchNonNestedDocs(Query query, String nestedPath) { /** Returns true if a query on the given field might match parent documents * or documents that are nested under a different path. */ - boolean mightMatchNonNestedDocs(String field, String nestedPath) { + private static boolean mightMatchNonNestedDocs(SearchExecutionContext searchExecutionContext, String field, String nestedPath) { if (field.startsWith("_")) { // meta field. Every meta field behaves differently, eg. nested // documents have the same _uid as their parent, put their path in @@ -155,9 +147,10 @@ boolean mightMatchNonNestedDocs(String field, String nestedPath) { // we might add a nested filter when it is nor required. 
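For reference, the call-site shape this static refactor produces: NestedHelper no longer captures a NestedLookup and a mapped-field predicate at construction but pulls both off the SearchExecutionContext on each call. Compare the NestedQueryBuilder hunk earlier in this diff; in the sketch below, context is whatever SearchExecutionContext is in scope:

    // Before: a stateful helper, built per query.
    NestedHelper nestedHelper = new NestedHelper(context.nestedLookup(), context::isFieldMapped);
    boolean mightMatchBefore = nestedHelper.mightMatchNonNestedDocs(innerQuery, path);

    // After: a stateless utility, the context passed explicitly on every call.
    boolean mightMatchAfter = NestedHelper.mightMatchNonNestedDocs(innerQuery, path, context);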
return true; } - if (isMappedFieldPredicate.test(field) == false) { + if (searchExecutionContext.isFieldMapped(field) == false) { return false; } + var nestedLookup = searchExecutionContext.nestedLookup(); String nestedParent = nestedLookup.getNestedParent(field); if (nestedParent == null || nestedParent.startsWith(nestedPath) == false) { // the field is not a sub field of the nested path diff --git a/server/src/main/java/org/elasticsearch/index/search/stats/SearchStats.java b/server/src/main/java/org/elasticsearch/index/search/stats/SearchStats.java index ff514091979c..8b19d72ccc09 100644 --- a/server/src/main/java/org/elasticsearch/index/search/stats/SearchStats.java +++ b/server/src/main/java/org/elasticsearch/index/search/stats/SearchStats.java @@ -105,7 +105,7 @@ private Stats(StreamInput in) throws IOException { suggestTimeInMillis = in.readVLong(); suggestCurrent = in.readVLong(); - if (in.getTransportVersion().onOrAfter(TransportVersions.SEARCH_FAILURE_STATS)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { queryFailure = in.readVLong(); fetchFailure = in.readVLong(); } @@ -129,7 +129,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeVLong(suggestTimeInMillis); out.writeVLong(suggestCurrent); - if (out.getTransportVersion().onOrAfter(TransportVersions.SEARCH_FAILURE_STATS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeVLong(queryFailure); out.writeVLong(fetchFailure); } diff --git a/server/src/main/java/org/elasticsearch/inference/EmptySecretSettings.java b/server/src/main/java/org/elasticsearch/inference/EmptySecretSettings.java index 9c666bd4a35f..ee38273f13da 100644 --- a/server/src/main/java/org/elasticsearch/inference/EmptySecretSettings.java +++ b/server/src/main/java/org/elasticsearch/inference/EmptySecretSettings.java @@ -44,7 +44,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_EIS_INTEGRATION_ADDED; + return TransportVersions.V_8_16_0; } @Override diff --git a/server/src/main/java/org/elasticsearch/inference/ModelConfigurations.java b/server/src/main/java/org/elasticsearch/inference/ModelConfigurations.java index ebf32f041155..53ce0bab6361 100644 --- a/server/src/main/java/org/elasticsearch/inference/ModelConfigurations.java +++ b/server/src/main/java/org/elasticsearch/inference/ModelConfigurations.java @@ -121,7 +121,7 @@ public ModelConfigurations(StreamInput in) throws IOException { this.service = in.readString(); this.serviceSettings = in.readNamedWriteable(ServiceSettings.class); this.taskSettings = in.readNamedWriteable(TaskSettings.class); - this.chunkingSettings = in.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_CHUNKING_SETTINGS) + this.chunkingSettings = in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0) ? 
in.readOptionalNamedWriteable(ChunkingSettings.class) : null; } @@ -133,7 +133,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(service); out.writeNamedWriteable(serviceSettings); out.writeNamedWriteable(taskSettings); - if (out.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_CHUNKING_SETTINGS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeOptionalNamedWriteable(chunkingSettings); } } diff --git a/server/src/main/java/org/elasticsearch/ingest/EnterpriseGeoIpTask.java b/server/src/main/java/org/elasticsearch/ingest/EnterpriseGeoIpTask.java index e696c38b9f01..ff6a687da9b4 100644 --- a/server/src/main/java/org/elasticsearch/ingest/EnterpriseGeoIpTask.java +++ b/server/src/main/java/org/elasticsearch/ingest/EnterpriseGeoIpTask.java @@ -64,7 +64,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ENTERPRISE_GEOIP_DOWNLOADER; + return TransportVersions.V_8_16_0; } @Override diff --git a/server/src/main/java/org/elasticsearch/plugins/PluginsLoader.java b/server/src/main/java/org/elasticsearch/plugins/PluginsLoader.java index aa21e5c64d90..aadda93f977b 100644 --- a/server/src/main/java/org/elasticsearch/plugins/PluginsLoader.java +++ b/server/src/main/java/org/elasticsearch/plugins/PluginsLoader.java @@ -50,7 +50,6 @@ * to have all the plugin information they need prior to starting. */ public class PluginsLoader { - /** * Contains information about the {@link ClassLoader} required to load a plugin */ @@ -64,18 +63,26 @@ public interface PluginLayer { * @return The {@link ClassLoader} used to instantiate the main class for the plugin */ ClassLoader pluginClassLoader(); + + /** + * @return The {@link ModuleLayer} for the plugin modules + */ + ModuleLayer pluginModuleLayer(); } /** * Contains information about the {@link ClassLoader}s and {@link ModuleLayer} required for loading a plugin - * @param pluginBundle Information about the bundle of jars used in this plugin + * + * @param pluginBundle Information about the bundle of jars used in this plugin * @param pluginClassLoader The {@link ClassLoader} used to instantiate the main class for the plugin - * @param spiClassLoader The exported {@link ClassLoader} visible to other Java modules - * @param spiModuleLayer The exported {@link ModuleLayer} visible to other Java modules + * @param pluginModuleLayer The {@link ModuleLayer} containing the Java modules of the plugin + * @param spiClassLoader The exported {@link ClassLoader} visible to other Java modules + * @param spiModuleLayer The exported {@link ModuleLayer} visible to other Java modules */ private record LoadedPluginLayer( PluginBundle pluginBundle, ClassLoader pluginClassLoader, + ModuleLayer pluginModuleLayer, ClassLoader spiClassLoader, ModuleLayer spiModuleLayer ) implements PluginLayer { @@ -103,6 +110,10 @@ public record LayerAndLoader(ModuleLayer layer, ClassLoader loader) { public static LayerAndLoader ofLoader(ClassLoader loader) { return new LayerAndLoader(ModuleLayer.boot(), loader); } + + public static LayerAndLoader ofUberModuleLoader(UberModuleClassLoader loader) { + return new LayerAndLoader(loader.getLayer(), loader); + } } private static final Logger logger = LogManager.getLogger(PluginsLoader.class); @@ -111,6 +122,7 @@ public static LayerAndLoader ofLoader(ClassLoader loader) { private final List moduleDescriptors; private final List pluginDescriptors; private final Map loadedPluginLayers; + private final Set 
allBundles; /** * Constructs a new PluginsLoader @@ -185,17 +197,19 @@ public static PluginsLoader createPluginsLoader(Path modulesDirectory, Path plug } } - return new PluginsLoader(moduleDescriptors, pluginDescriptors, loadedPluginLayers); + return new PluginsLoader(moduleDescriptors, pluginDescriptors, loadedPluginLayers, Set.copyOf(seenBundles)); } PluginsLoader( List moduleDescriptors, List pluginDescriptors, - Map loadedPluginLayers + Map loadedPluginLayers, + Set allBundles ) { this.moduleDescriptors = moduleDescriptors; this.pluginDescriptors = pluginDescriptors; this.loadedPluginLayers = loadedPluginLayers; + this.allBundles = allBundles; } public List moduleDescriptors() { @@ -210,6 +224,10 @@ public Stream pluginLayers() { return loadedPluginLayers.values().stream().map(Function.identity()); } + public Set allBundles() { + return allBundles; + } + private static void loadPluginLayer( PluginBundle bundle, Map loaded, @@ -239,7 +257,7 @@ private static void loadPluginLayer( } final ClassLoader pluginParentLoader = spiLayerAndLoader == null ? parentLoader : spiLayerAndLoader.loader(); - final LayerAndLoader pluginLayerAndLoader = createPlugin( + final LayerAndLoader pluginLayerAndLoader = createPluginLayerAndLoader( bundle, pluginParentLoader, extendedPlugins, @@ -253,7 +271,16 @@ private static void loadPluginLayer( spiLayerAndLoader = pluginLayerAndLoader; } - loaded.put(name, new LoadedPluginLayer(bundle, pluginClassLoader, spiLayerAndLoader.loader, spiLayerAndLoader.layer)); + loaded.put( + name, + new LoadedPluginLayer( + bundle, + pluginClassLoader, + pluginLayerAndLoader.layer(), + spiLayerAndLoader.loader, + spiLayerAndLoader.layer + ) + ); } static LayerAndLoader createSPI( @@ -277,7 +304,7 @@ static LayerAndLoader createSPI( } } - static LayerAndLoader createPlugin( + private static LayerAndLoader createPluginLayerAndLoader( PluginBundle bundle, ClassLoader pluginParentLoader, List extendedPlugins, @@ -294,7 +321,7 @@ static LayerAndLoader createPlugin( return createPluginModuleLayer(bundle, pluginParentLoader, parentLayers, qualifiedExports); } else if (plugin.isStable()) { logger.debug(() -> "Loading bundle: " + plugin.getName() + ", non-modular as synthetic module"); - return LayerAndLoader.ofLoader( + return LayerAndLoader.ofUberModuleLoader( UberModuleClassLoader.getInstance( pluginParentLoader, ModuleLayer.boot(), diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestGetSourceAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestGetSourceAction.java index a09fcbd0c527..7e4d23db7028 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestGetSourceAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestGetSourceAction.java @@ -15,7 +15,6 @@ import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestChannel; @@ -40,9 +39,6 @@ */ @ServerlessScope(Scope.PUBLIC) public class RestGetSourceAction extends BaseRestHandler { - private final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestGetSourceAction.class); - static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in get_source and exist_source " - + "requests is deprecated."; @Override 
public List routes() { diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsAction.java index 65aa1869a41e..9d39bf7f343c 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsAction.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.termvectors.TermVectorsRequest; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -28,8 +27,6 @@ @ServerlessScope(Scope.PUBLIC) public class RestMultiTermVectorsAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestMultiTermVectorsAction.class); - static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in multi term vector requests is deprecated."; @Override public List routes() { diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestCountAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestCountAction.java index c1a55874bfc5..b0e08b376f9d 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestCountAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestCountAction.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -36,8 +35,6 @@ @ServerlessScope(Scope.PUBLIC) public class RestCountAction extends BaseRestHandler { - private final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestCountAction.class); - static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in count requests is deprecated."; @Override public List routes() { diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java index ff062084a3cb..a9c2ff7576b0 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java @@ -16,7 +16,6 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.core.Booleans; import org.elasticsearch.core.Nullable; import org.elasticsearch.features.NodeFeature; @@ -56,8 +55,6 @@ @ServerlessScope(Scope.PUBLIC) public class RestSearchAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestSearchAction.class); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in search requests is deprecated."; /** * Indicates whether hits.total should be rendered as an integer or an object diff --git 
a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java index 8ac35f7c40ca..b87d097413b6 100644 --- a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java @@ -444,10 +444,9 @@ public void preProcess() { public Query buildFilteredQuery(Query query) { List filters = new ArrayList<>(); NestedLookup nestedLookup = searchExecutionContext.nestedLookup(); - NestedHelper nestedHelper = new NestedHelper(nestedLookup, searchExecutionContext::isFieldMapped); if (nestedLookup != NestedLookup.EMPTY - && nestedHelper.mightMatchNestedDocs(query) - && (aliasFilter == null || nestedHelper.mightMatchNestedDocs(aliasFilter))) { + && NestedHelper.mightMatchNestedDocs(query, searchExecutionContext) + && (aliasFilter == null || NestedHelper.mightMatchNestedDocs(aliasFilter, searchExecutionContext))) { filters.add(Queries.newNonNestedFilter(searchExecutionContext.indexVersionCreated())); } diff --git a/server/src/main/java/org/elasticsearch/search/DocValueFormat.java b/server/src/main/java/org/elasticsearch/search/DocValueFormat.java index a1e8eb25f478..f8d161ef1f5e 100644 --- a/server/src/main/java/org/elasticsearch/search/DocValueFormat.java +++ b/server/src/main/java/org/elasticsearch/search/DocValueFormat.java @@ -263,7 +263,7 @@ private DateTime(DateFormatter formatter, ZoneId timeZone, DateFieldMapper.Resol private DateTime(StreamInput in) throws IOException { String formatterPattern = in.readString(); - Locale locale = in.getTransportVersion().onOrAfter(TransportVersions.DATE_TIME_DOC_VALUES_LOCALES) + Locale locale = in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0) ? LocaleUtils.parse(in.readString()) : DateFieldMapper.DEFAULT_LOCALE; String zoneId = in.readString(); @@ -297,7 +297,7 @@ public static DateTime readFrom(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(formatter.pattern()); - if (out.getTransportVersion().onOrAfter(TransportVersions.DATE_TIME_DOC_VALUES_LOCALES)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeString(formatter.locale().toString()); } out.writeString(timeZone.getId()); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/BucketOrder.java b/server/src/main/java/org/elasticsearch/search/aggregations/BucketOrder.java index 2d360705f75b..c412ecb5d636 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/BucketOrder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/BucketOrder.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket; +import org.elasticsearch.search.aggregations.bucket.terms.BucketAndOrd; import org.elasticsearch.search.aggregations.support.AggregationPath; import org.elasticsearch.xcontent.ToXContentObject; @@ -20,13 +21,12 @@ import java.util.Comparator; import java.util.List; import java.util.function.BiFunction; -import java.util.function.ToLongFunction; /** * {@link Bucket} ordering strategy. Buckets can be order either as * "complete" buckets using {@link #comparator()} or against a combination * of the buckets internals with its ordinal with - * {@link #partiallyBuiltBucketComparator(ToLongFunction, Aggregator)}. 
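Note on the TransportVersions changes in this file and the ones above: the per-feature constants (DATE_TIME_DOC_VALUES_LOCALES, ML_INFERENCE_CHUNKING_SETTINGS, ENTERPRISE_GEOIP_DOWNLOADER) are collapsed into the released V_8_16_0 id; the wire behavior is unchanged. The gating idiom itself, as a minimal sketch: ExampleFormat is a hypothetical class, while the stream APIs, LocaleUtils.parse and DateFieldMapper.DEFAULT_LOCALE are the ones used in the hunks above.

    import java.io.IOException;
    import java.util.Locale;

    import org.elasticsearch.TransportVersions;
    import org.elasticsearch.common.io.stream.StreamInput;
    import org.elasticsearch.common.io.stream.StreamOutput;
    import org.elasticsearch.common.io.stream.Writeable;
    import org.elasticsearch.common.util.LocaleUtils;
    import org.elasticsearch.index.mapper.DateFieldMapper;

    public class ExampleFormat implements Writeable {

        private final String pattern;
        private final Locale locale; // field introduced in 8.16

        public ExampleFormat(StreamInput in) throws IOException {
            this.pattern = in.readString();
            // an 8.15 sender never wrote a locale, so fall back to the default
            this.locale = in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)
                ? LocaleUtils.parse(in.readString())
                : DateFieldMapper.DEFAULT_LOCALE;
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            out.writeString(pattern);
            // only emit the field when the receiving node knows how to read it
            if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) {
                out.writeString(locale.toString());
            }
        }
    }

Both sides check the same version constant, so a mixed 8.15/8.16 cluster never sees unexpected bytes on the stream in either direction.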
+ * {@link #partiallyBuiltBucketComparator(Aggregator)}.
  */
 public abstract class BucketOrder implements ToXContentObject, Writeable {
     /**
@@ -102,7 +102,7 @@ public final void validate(Aggregator aggregator) throws AggregationExecutionExc
          * to validate this order because doing so checks all of the appropriate
          * paths.
          */
-        partiallyBuiltBucketComparator(null, aggregator);
+        partiallyBuiltBucketComparator(aggregator);
     }

     /**
@@ -121,7 +121,7 @@ public final void validate(Aggregator aggregator) throws AggregationExecutionExc
      * with it all the time.
      *
      */
-    public abstract <T extends Bucket> Comparator<T> partiallyBuiltBucketComparator(ToLongFunction<T> ordinalReader, Aggregator aggregator);
+    public abstract <T extends Bucket> Comparator<BucketAndOrd<T>> partiallyBuiltBucketComparator(Aggregator aggregator);

     /**
      * Build a comparator for fully built buckets.
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/InternalOrder.java b/server/src/main/java/org/elasticsearch/search/aggregations/InternalOrder.java
index b2ca4a10dc4b..3593eb5adf7e 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/InternalOrder.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/InternalOrder.java
@@ -15,6 +15,7 @@
 import org.elasticsearch.common.logging.DeprecationLogger;
 import org.elasticsearch.search.aggregations.Aggregator.BucketComparator;
 import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket;
+import org.elasticsearch.search.aggregations.bucket.terms.BucketAndOrd;
 import org.elasticsearch.search.aggregations.support.AggregationPath;
 import org.elasticsearch.search.sort.SortOrder;
 import org.elasticsearch.search.sort.SortValue;
@@ -30,7 +31,6 @@
 import java.util.List;
 import java.util.Objects;
 import java.util.function.BiFunction;
-import java.util.function.ToLongFunction;

 /**
  * Implementations for {@link Bucket} ordering strategies.
@@ -63,10 +63,10 @@ public AggregationPath path() {
         }

         @Override
-        public <T extends Bucket> Comparator<T> partiallyBuiltBucketComparator(ToLongFunction<T> ordinalReader, Aggregator aggregator) {
+        public <T extends Bucket> Comparator<BucketAndOrd<T>> partiallyBuiltBucketComparator(Aggregator aggregator) {
             try {
                 BucketComparator bucketComparator = path.bucketComparator(aggregator, order);
-                return (lhs, rhs) -> bucketComparator.compare(ordinalReader.applyAsLong(lhs), ordinalReader.applyAsLong(rhs));
+                return (lhs, rhs) -> bucketComparator.compare(lhs.ord, rhs.ord);
             } catch (IllegalArgumentException e) {
                 throw new AggregationExecutionException.InvalidPath("Invalid aggregation order path [" + path + "]. 
" + e.getMessage(), e); } @@ -188,12 +188,13 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } @Override - public Comparator partiallyBuiltBucketComparator(ToLongFunction ordinalReader, Aggregator aggregator) { - List> comparators = orderElements.stream() - .map(oe -> oe.partiallyBuiltBucketComparator(ordinalReader, aggregator)) - .toList(); + public Comparator> partiallyBuiltBucketComparator(Aggregator aggregator) { + List>> comparators = new ArrayList<>(orderElements.size()); + for (BucketOrder order : orderElements) { + comparators.add(order.partiallyBuiltBucketComparator(aggregator)); + } return (lhs, rhs) -> { - for (Comparator c : comparators) { + for (Comparator> c : comparators) { int result = c.compare(lhs, rhs); if (result != 0) { return result; @@ -299,9 +300,9 @@ byte id() { } @Override - public Comparator partiallyBuiltBucketComparator(ToLongFunction ordinalReader, Aggregator aggregator) { + public Comparator> partiallyBuiltBucketComparator(Aggregator aggregator) { Comparator comparator = comparator(); - return comparator::compare; + return (lhs, rhs) -> comparator.compare(lhs.bucket, rhs.bucket); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java index 344b90b06c4f..571ce3a9a451 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java @@ -13,6 +13,7 @@ import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.IntArray; import org.elasticsearch.common.util.LongArray; import org.elasticsearch.common.util.ObjectArray; import org.elasticsearch.core.Releasables; @@ -26,6 +27,7 @@ import org.elasticsearch.search.aggregations.InternalOrder; import org.elasticsearch.search.aggregations.LeafBucketCollector; import org.elasticsearch.search.aggregations.LeafBucketCollectorBase; +import org.elasticsearch.search.aggregations.bucket.terms.BucketAndOrd; import org.elasticsearch.search.aggregations.bucket.terms.BucketPriorityQueue; import org.elasticsearch.search.aggregations.bucket.terms.BytesKeyedBucketOrds; import org.elasticsearch.search.aggregations.bucket.terms.InternalTerms; @@ -38,7 +40,6 @@ import java.util.Arrays; import java.util.Map; import java.util.function.BiConsumer; -import java.util.function.Supplier; import static java.util.Collections.emptyList; import static org.elasticsearch.search.aggregations.InternalOrder.isKeyOrder; @@ -115,51 +116,57 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw LongArray otherDocCounts = bigArrays().newLongArray(owningBucketOrds.size()); ObjectArray topBucketsPerOrd = bigArrays().newObjectArray(owningBucketOrds.size()) ) { - for (long ordIdx = 0; ordIdx < topBucketsPerOrd.size(); ordIdx++) { - int size = (int) Math.min(bucketOrds.size(), bucketCountThresholds.getShardSize()); - - // as users can't control sort order, in practice we'll always sort by doc count descending - try ( - BucketPriorityQueue ordered = new BucketPriorityQueue<>( - size, - bigArrays(), - partiallyBuiltBucketComparator - ) - ) { - StringTerms.Bucket spare = null; - BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = 
bucketOrds.ordsEnum(owningBucketOrds.get(ordIdx)); - Supplier emptyBucketBuilder = () -> new StringTerms.Bucket( - new BytesRef(), - 0, - null, - false, - 0, - format - ); - while (ordsEnum.next()) { - long docCount = bucketDocCount(ordsEnum.ord()); - otherDocCounts.increment(ordIdx, docCount); - if (spare == null) { - checkRealMemoryCBForInternalBucket(); - spare = emptyBucketBuilder.get(); + try (IntArray bucketsToCollect = bigArrays().newIntArray(owningBucketOrds.size())) { + // find how many buckets we are going to collect + long ordsToCollect = 0; + for (long ordIdx = 0; ordIdx < owningBucketOrds.size(); ordIdx++) { + int size = (int) Math.min(bucketOrds.bucketsInOrd(owningBucketOrds.get(ordIdx)), bucketCountThresholds.getShardSize()); + bucketsToCollect.set(ordIdx, size); + ordsToCollect += size; + } + try (LongArray ordsArray = bigArrays().newLongArray(ordsToCollect)) { + long ordsCollected = 0; + for (long ordIdx = 0; ordIdx < owningBucketOrds.size(); ordIdx++) { + // as users can't control sort order, in practice we'll always sort by doc count descending + try ( + BucketPriorityQueue ordered = new BucketPriorityQueue<>( + bucketsToCollect.get(ordIdx), + bigArrays(), + order.partiallyBuiltBucketComparator(this) + ) + ) { + BucketAndOrd spare = null; + BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds.get(ordIdx)); + while (ordsEnum.next()) { + long docCount = bucketDocCount(ordsEnum.ord()); + otherDocCounts.increment(ordIdx, docCount); + if (spare == null) { + checkRealMemoryCBForInternalBucket(); + spare = new BucketAndOrd<>(new StringTerms.Bucket(new BytesRef(), 0, null, false, 0, format)); + } + ordsEnum.readValue(spare.bucket.getTermBytes()); + spare.bucket.setDocCount(docCount); + spare.ord = ordsEnum.ord(); + spare = ordered.insertWithOverflow(spare); + } + final int orderedSize = (int) ordered.size(); + final StringTerms.Bucket[] buckets = new StringTerms.Bucket[orderedSize]; + for (int i = orderedSize - 1; i >= 0; --i) { + BucketAndOrd bucketAndOrd = ordered.pop(); + buckets[i] = bucketAndOrd.bucket; + ordsArray.set(ordsCollected + i, bucketAndOrd.ord); + otherDocCounts.increment(ordIdx, -bucketAndOrd.bucket.getDocCount()); + bucketAndOrd.bucket.setTermBytes(BytesRef.deepCopyOf(bucketAndOrd.bucket.getTermBytes())); + } + topBucketsPerOrd.set(ordIdx, buckets); + ordsCollected += orderedSize; } - ordsEnum.readValue(spare.getTermBytes()); - spare.setDocCount(docCount); - spare.setBucketOrd(ordsEnum.ord()); - spare = ordered.insertWithOverflow(spare); - } - - topBucketsPerOrd.set(ordIdx, new StringTerms.Bucket[(int) ordered.size()]); - for (int i = (int) ordered.size() - 1; i >= 0; --i) { - topBucketsPerOrd.get(ordIdx)[i] = ordered.pop(); - otherDocCounts.increment(ordIdx, -topBucketsPerOrd.get(ordIdx)[i].getDocCount()); - topBucketsPerOrd.get(ordIdx)[i].setTermBytes(BytesRef.deepCopyOf(topBucketsPerOrd.get(ordIdx)[i].getTermBytes())); } + assert ordsCollected == ordsArray.size(); + buildSubAggsForAllBuckets(topBucketsPerOrd, ordsArray, InternalTerms.Bucket::setAggregations); } } - buildSubAggsForAllBuckets(topBucketsPerOrd, InternalTerms.Bucket::getBucketOrd, InternalTerms.Bucket::setAggregations); - return buildAggregations(Math.toIntExact(owningBucketOrds.size()), ordIdx -> { final BucketOrder reduceOrder; if (isKeyOrder(order) == false) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/BucketPriorityQueue.java 
b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/BucketPriorityQueue.java
index 7f8e5c8c885f..9550003a5bd1 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/BucketPriorityQueue.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/BucketPriorityQueue.java
@@ -13,17 +13,17 @@
 import java.util.Comparator;

-public class BucketPriorityQueue<B> extends ObjectArrayPriorityQueue<B> {
+public class BucketPriorityQueue<B> extends ObjectArrayPriorityQueue<BucketAndOrd<B>> {

-    private final Comparator<B> comparator;
+    private final Comparator<BucketAndOrd<B>> comparator;

-    public BucketPriorityQueue(int size, BigArrays bigArrays, Comparator<B> comparator) {
+    public BucketPriorityQueue(int size, BigArrays bigArrays, Comparator<BucketAndOrd<B>> comparator) {
         super(size, bigArrays);
         this.comparator = comparator;
     }

     @Override
-    protected boolean lessThan(B a, B b) {
+    protected boolean lessThan(BucketAndOrd<B> a, BucketAndOrd<B> b) {
         return comparator.compare(a, b) > 0; // reverse, since we reverse again when adding to a list
     }
 }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/BucketSignificancePriorityQueue.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/BucketSignificancePriorityQueue.java
index fe751c9e7918..4736f52d9362 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/BucketSignificancePriorityQueue.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/BucketSignificancePriorityQueue.java
@@ -12,14 +12,14 @@
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.common.util.ObjectArrayPriorityQueue;

-public class BucketSignificancePriorityQueue<B extends SignificantTerms.Bucket> extends ObjectArrayPriorityQueue<B> {
+public class BucketSignificancePriorityQueue<B extends SignificantTerms.Bucket> extends ObjectArrayPriorityQueue<BucketAndOrd<B>> {

     public BucketSignificancePriorityQueue(int size, BigArrays bigArrays) {
         super(size, bigArrays);
     }

     @Override
-    protected boolean lessThan(SignificantTerms.Bucket o1, SignificantTerms.Bucket o2) {
-        return o1.getSignificanceScore() < o2.getSignificanceScore();
+    protected boolean lessThan(BucketAndOrd<B> o1, BucketAndOrd<B> o2) {
+        return o1.bucket.getSignificanceScore() < o2.bucket.getSignificanceScore();
     }
 }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java
index 0ec03a6f56dd..439b61cc43dd 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java
@@ -20,6 +20,7 @@
 import org.apache.lucene.util.PriorityQueue;
 import org.elasticsearch.common.CheckedSupplier;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.util.IntArray;
 import org.elasticsearch.common.util.LongArray;
 import org.elasticsearch.common.util.LongHash;
 import org.elasticsearch.common.util.ObjectArray;
@@ -561,10 +562,10 @@ InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOExc
         ) {
             GlobalOrdLookupFunction lookupGlobalOrd = valuesSupplier.get()::lookupOrd;
             final int size = (int) Math.min(valueCount, bucketCountThresholds.getShardSize());
-            try (ObjectArrayPriorityQueue<TB> ordered = collectionStrategy.buildPriorityQueue(size)) {
+            try (ObjectArrayPriorityQueue<BucketAndOrd<TB>> ordered = 
collectionStrategy.buildPriorityQueue(size)) { BucketUpdater updater = collectionStrategy.bucketUpdater(0, lookupGlobalOrd); collect(new BucketInfoConsumer() { - TB spare = null; + BucketAndOrd spare = null; @Override public void accept(long globalOrd, long bucketOrd, long docCount) throws IOException { @@ -572,24 +573,31 @@ public void accept(long globalOrd, long bucketOrd, long docCount) throws IOExcep if (docCount >= bucketCountThresholds.getShardMinDocCount()) { if (spare == null) { checkRealMemoryCBForInternalBucket(); - spare = collectionStrategy.buildEmptyTemporaryBucket(); + spare = new BucketAndOrd<>(collectionStrategy.buildEmptyTemporaryBucket()); } - updater.updateBucket(spare, globalOrd, bucketOrd, docCount); + spare.ord = bucketOrd; + updater.updateBucket(spare.bucket, globalOrd, docCount); spare = ordered.insertWithOverflow(spare); } } }); // Get the top buckets - topBucketsPreOrd.set(0, collectionStrategy.buildBuckets((int) ordered.size())); - for (int i = (int) ordered.size() - 1; i >= 0; --i) { - checkRealMemoryCBForInternalBucket(); - B bucket = collectionStrategy.convertTempBucketToRealBucket(ordered.pop(), lookupGlobalOrd); - topBucketsPreOrd.get(0)[i] = bucket; - otherDocCount.increment(0, -bucket.getDocCount()); + int orderedSize = (int) ordered.size(); + try (LongArray ordsArray = bigArrays().newLongArray(orderedSize)) { + B[] buckets = collectionStrategy.buildBuckets(orderedSize); + for (int i = orderedSize - 1; i >= 0; --i) { + checkRealMemoryCBForInternalBucket(); + BucketAndOrd bucketAndOrd = ordered.pop(); + B bucket = collectionStrategy.convertTempBucketToRealBucket(bucketAndOrd.bucket, lookupGlobalOrd); + ordsArray.set(i, bucketAndOrd.ord); + buckets[i] = bucket; + otherDocCount.increment(0, -bucket.getDocCount()); + } + topBucketsPreOrd.set(0, buckets); + collectionStrategy.buildSubAggs(topBucketsPreOrd, ordsArray); } } - collectionStrategy.buildSubAggs(topBucketsPreOrd); return GlobalOrdinalsStringTermsAggregator.this.buildAggregations( Math.toIntExact(owningBucketOrds.size()), ordIdx -> collectionStrategy.buildResult( @@ -710,39 +718,61 @@ InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOExc LongArray otherDocCount = bigArrays().newLongArray(owningBucketOrds.size(), true); ObjectArray topBucketsPreOrd = collectionStrategy.buildTopBucketsPerOrd(owningBucketOrds.size()) ) { - GlobalOrdLookupFunction lookupGlobalOrd = valuesSupplier.get()::lookupOrd; - for (long ordIdx = 0; ordIdx < topBucketsPreOrd.size(); ordIdx++) { - long owningBucketOrd = owningBucketOrds.get(ordIdx); - collectZeroDocEntriesIfNeeded(owningBucketOrds.get(ordIdx)); - int size = (int) Math.min(bucketOrds.bucketsInOrd(owningBucketOrd), bucketCountThresholds.getShardSize()); - try (ObjectArrayPriorityQueue ordered = collectionStrategy.buildPriorityQueue(size)) { - BucketUpdater updater = collectionStrategy.bucketUpdater(owningBucketOrd, lookupGlobalOrd); - LongKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrd); - TB spare = null; - while (ordsEnum.next()) { - long docCount = bucketDocCount(ordsEnum.ord()); - otherDocCount.increment(ordIdx, docCount); - if (docCount < bucketCountThresholds.getShardMinDocCount()) { - continue; - } - if (spare == null) { - checkRealMemoryCBForInternalBucket(); - spare = collectionStrategy.buildEmptyTemporaryBucket(); + try (IntArray bucketsToCollect = bigArrays().newIntArray(owningBucketOrds.size())) { + long ordsToCollect = 0; + for (long ordIdx = 0; ordIdx < owningBucketOrds.size(); ordIdx++) { + final 
long owningBucketOrd = owningBucketOrds.get(ordIdx); + collectZeroDocEntriesIfNeeded(owningBucketOrd); + final int size = (int) Math.min(bucketOrds.bucketsInOrd(owningBucketOrd), bucketCountThresholds.getShardSize()); + ordsToCollect += size; + bucketsToCollect.set(ordIdx, size); + } + try (LongArray ordsArray = bigArrays().newLongArray(ordsToCollect)) { + long ordsCollected = 0; + GlobalOrdLookupFunction lookupGlobalOrd = valuesSupplier.get()::lookupOrd; + for (long ordIdx = 0; ordIdx < topBucketsPreOrd.size(); ordIdx++) { + long owningBucketOrd = owningBucketOrds.get(ordIdx); + try ( + ObjectArrayPriorityQueue> ordered = collectionStrategy.buildPriorityQueue( + bucketsToCollect.get(ordIdx) + ) + ) { + BucketUpdater updater = collectionStrategy.bucketUpdater(owningBucketOrd, lookupGlobalOrd); + LongKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrd); + BucketAndOrd spare = null; + while (ordsEnum.next()) { + long docCount = bucketDocCount(ordsEnum.ord()); + otherDocCount.increment(ordIdx, docCount); + if (docCount < bucketCountThresholds.getShardMinDocCount()) { + continue; + } + if (spare == null) { + checkRealMemoryCBForInternalBucket(); + spare = new BucketAndOrd<>(collectionStrategy.buildEmptyTemporaryBucket()); + } + updater.updateBucket(spare.bucket, ordsEnum.value(), docCount); + spare.ord = ordsEnum.ord(); + spare = ordered.insertWithOverflow(spare); + } + // Get the top buckets + int orderedSize = (int) ordered.size(); + B[] buckets = collectionStrategy.buildBuckets(orderedSize); + for (int i = orderedSize - 1; i >= 0; --i) { + checkRealMemoryCBForInternalBucket(); + BucketAndOrd bucketAndOrd = ordered.pop(); + B bucket = collectionStrategy.convertTempBucketToRealBucket(bucketAndOrd.bucket, lookupGlobalOrd); + ordsArray.set(ordsCollected + i, bucketAndOrd.ord); + buckets[i] = bucket; + otherDocCount.increment(ordIdx, -bucket.getDocCount()); + } + topBucketsPreOrd.set(ordIdx, buckets); + ordsCollected += orderedSize; } - updater.updateBucket(spare, ordsEnum.value(), ordsEnum.ord(), docCount); - spare = ordered.insertWithOverflow(spare); - } - // Get the top buckets - topBucketsPreOrd.set(ordIdx, collectionStrategy.buildBuckets((int) ordered.size())); - for (int i = (int) ordered.size() - 1; i >= 0; --i) { - checkRealMemoryCBForInternalBucket(); - B bucket = collectionStrategy.convertTempBucketToRealBucket(ordered.pop(), lookupGlobalOrd); - topBucketsPreOrd.get(ordIdx)[i] = bucket; - otherDocCount.increment(ordIdx, -bucket.getDocCount()); } + assert ordsCollected == ordsArray.size(); + collectionStrategy.buildSubAggs(topBucketsPreOrd, ordsArray); } } - collectionStrategy.buildSubAggs(topBucketsPreOrd); return GlobalOrdinalsStringTermsAggregator.this.buildAggregations( Math.toIntExact(owningBucketOrds.size()), ordIdx -> collectionStrategy.buildResult( @@ -791,7 +821,7 @@ abstract class ResultStrategy< * Build a {@link PriorityQueue} to sort the buckets. After we've * collected all of the buckets we'll collect all entries in the queue. */ - abstract ObjectArrayPriorityQueue buildPriorityQueue(int size); + abstract ObjectArrayPriorityQueue> buildPriorityQueue(int size); /** * Build an array to hold the "top" buckets for each ordinal. @@ -813,7 +843,7 @@ abstract class ResultStrategy< * Build the sub-aggregations into the buckets. This will usually * delegate to {@link #buildSubAggsForAllBuckets}. 
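The loop above, and its twins in CountedTermsAggregator, MapStringTermsAggregator and NumericTermsAggregator, all follow the same new two-pass shape: first size the per-owning-ordinal top lists, then collect the winners while recording each winner's bucket ordinal into one flat LongArray, so buildSubAggsForAllBuckets can run once over every selected bucket instead of reading a bucketOrd field off each bucket. A stand-alone sketch of that shape; plain JDK collections stand in for the BigArrays-backed IntArray/LongArray, and the circuit-breaker checks are omitted.

    import java.util.ArrayList;
    import java.util.Comparator;
    import java.util.List;
    import java.util.PriorityQueue;

    final class TwoPassTopN {

        /** Pairs a partially built bucket with the ordinal that later keys its sub-aggregations. */
        static final class BucketAndOrd<B> {
            final B bucket;
            long ord;
            BucketAndOrd(B bucket) { this.bucket = bucket; }
        }

        static <B> long[] collect(List<List<BucketAndOrd<B>>> candidatesPerOwningOrd,
                                  int shardSize,
                                  Comparator<BucketAndOrd<B>> bestFirst,
                                  List<List<B>> topBucketsPerOrd) {
            // pass 1: decide how many buckets survive for each owning ordinal
            int ordsToCollect = 0;
            for (List<BucketAndOrd<B>> candidates : candidatesPerOwningOrd) {
                ordsToCollect += Math.min(candidates.size(), shardSize);
            }
            long[] ordsArray = new long[ordsToCollect];

            // pass 2: top-N per owning ordinal, writing winners' ords into the flat array
            int ordsCollected = 0;
            for (List<BucketAndOrd<B>> candidates : candidatesPerOwningOrd) {
                PriorityQueue<BucketAndOrd<B>> ordered = new PriorityQueue<>(bestFirst);
                ordered.addAll(candidates);
                int size = Math.min(candidates.size(), shardSize);
                List<B> top = new ArrayList<>(size);
                for (int i = 0; i < size; i++) {
                    BucketAndOrd<B> best = ordered.poll();
                    ordsArray[ordsCollected + i] = best.ord;
                    top.add(best.bucket);
                }
                topBucketsPerOrd.add(top);
                ordsCollected += size;
            }
            return ordsArray; // one pass of sub-agg building covers every winner
        }
    }

Sizing the flat array up front is what lets the per-bucket ordinal field disappear: the ordinal travels next to its bucket only while the queue is being filled, then lives in ordsArray.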
*/ - abstract void buildSubAggs(ObjectArray topBucketsPreOrd) throws IOException; + abstract void buildSubAggs(ObjectArray topBucketsPreOrd, LongArray ordsArray) throws IOException; /** * Turn the buckets into an aggregation result. @@ -834,7 +864,7 @@ abstract class ResultStrategy< } interface BucketUpdater { - void updateBucket(TB spare, long globalOrd, long bucketOrd, long docCount) throws IOException; + void updateBucket(TB spare, long globalOrd, long docCount) throws IOException; } /** @@ -868,29 +898,30 @@ OrdBucket buildEmptyTemporaryBucket() { @Override BucketUpdater bucketUpdater(long owningBucketOrd, GlobalOrdLookupFunction lookupGlobalOrd) { - return (spare, globalOrd, bucketOrd, docCount) -> { + return (spare, globalOrd, docCount) -> { spare.globalOrd = globalOrd; - spare.bucketOrd = bucketOrd; spare.docCount = docCount; }; } @Override - ObjectArrayPriorityQueue buildPriorityQueue(int size) { - return new BucketPriorityQueue<>(size, bigArrays(), partiallyBuiltBucketComparator); + ObjectArrayPriorityQueue> buildPriorityQueue(int size) { + return new BucketPriorityQueue<>( + size, + bigArrays(), + order.partiallyBuiltBucketComparator(GlobalOrdinalsStringTermsAggregator.this) + ); } @Override StringTerms.Bucket convertTempBucketToRealBucket(OrdBucket temp, GlobalOrdLookupFunction lookupGlobalOrd) throws IOException { BytesRef term = BytesRef.deepCopyOf(lookupGlobalOrd.apply(temp.globalOrd)); - StringTerms.Bucket result = new StringTerms.Bucket(term, temp.docCount, null, showTermDocCountError, 0, format); - result.bucketOrd = temp.bucketOrd; - return result; + return new StringTerms.Bucket(term, temp.docCount, null, showTermDocCountError, 0, format); } @Override - void buildSubAggs(ObjectArray topBucketsPreOrd) throws IOException { - buildSubAggsForAllBuckets(topBucketsPreOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs); + void buildSubAggs(ObjectArray topBucketsPreOrd, LongArray ordsArray) throws IOException { + buildSubAggsForAllBuckets(topBucketsPreOrd, ordsArray, (b, aggs) -> b.aggregations = aggs); } @Override @@ -1005,8 +1036,7 @@ private long subsetSize(long owningBucketOrd) { @Override BucketUpdater bucketUpdater(long owningBucketOrd, GlobalOrdLookupFunction lookupGlobalOrd) { long subsetSize = subsetSize(owningBucketOrd); - return (spare, globalOrd, bucketOrd, docCount) -> { - spare.bucketOrd = bucketOrd; + return (spare, globalOrd, docCount) -> { oversizedCopy(lookupGlobalOrd.apply(globalOrd), spare.termBytes); spare.subsetDf = docCount; spare.supersetDf = backgroundFrequencies.freq(spare.termBytes); @@ -1020,7 +1050,7 @@ BucketUpdater bucketUpdater(long owningBucketOrd, } @Override - ObjectArrayPriorityQueue buildPriorityQueue(int size) { + ObjectArrayPriorityQueue> buildPriorityQueue(int size) { return new BucketSignificancePriorityQueue<>(size, bigArrays()); } @@ -1033,8 +1063,8 @@ SignificantStringTerms.Bucket convertTempBucketToRealBucket( } @Override - void buildSubAggs(ObjectArray topBucketsPreOrd) throws IOException { - buildSubAggsForAllBuckets(topBucketsPreOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs); + void buildSubAggs(ObjectArray topBucketsPreOrd, LongArray ordsArray) throws IOException { + buildSubAggsForAllBuckets(topBucketsPreOrd, ordsArray, (b, aggs) -> b.aggregations = aggs); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java index 
78ae2481f5d9..5108793b8a80 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java @@ -10,12 +10,12 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.util.ObjectArrayPriorityQueue; import org.elasticsearch.common.util.ObjectObjectPagedHashMap; import org.elasticsearch.core.Releasables; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationErrors; import org.elasticsearch.search.aggregations.AggregationReduceContext; -import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorReducer; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; @@ -58,12 +58,6 @@ public interface Reader> { long subsetDf; long supersetDf; - /** - * Ordinal of the bucket while it is being built. Not used after it is - * returned from {@link Aggregator#buildAggregations(org.elasticsearch.common.util.LongArray)} and not - * serialized. - */ - transient long bucketOrd; double score; protected InternalAggregations aggregations; final transient DocValueFormat format; @@ -235,7 +229,12 @@ canLeadReduction here is essentially checking if this shard returned data. Unma public InternalAggregation get() { final SignificanceHeuristic heuristic = getSignificanceHeuristic().rewrite(reduceContext); final int size = (int) (reduceContext.isFinalReduce() == false ? buckets.size() : Math.min(requiredSize, buckets.size())); - try (BucketSignificancePriorityQueue ordered = new BucketSignificancePriorityQueue<>(size, reduceContext.bigArrays())) { + try (ObjectArrayPriorityQueue ordered = new ObjectArrayPriorityQueue(size, reduceContext.bigArrays()) { + @Override + protected boolean lessThan(B a, B b) { + return a.getSignificanceScore() < b.getSignificanceScore(); + } + }) { buckets.forEach(entry -> { final B b = createBucket( entry.value.subsetDf[0], diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java index 739f0b923eaa..de35046691b3 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java @@ -38,8 +38,6 @@ public interface Reader> { B read(StreamInput in, DocValueFormat format, boolean showDocCountError) throws IOException; } - long bucketOrd; - protected long docCount; private long docCountError; protected InternalAggregations aggregations; @@ -88,14 +86,6 @@ public void setDocCount(long docCount) { this.docCount = docCount; } - public long getBucketOrd() { - return bucketOrd; - } - - public void setBucketOrd(long bucketOrd) { - this.bucketOrd = bucketOrd; - } - @Override public long getDocCountError() { return docCountError; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java index b96c495d3748..026912a583ef 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java +++ 
b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java @@ -17,6 +17,7 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.PriorityQueue; +import org.elasticsearch.common.util.IntArray; import org.elasticsearch.common.util.LongArray; import org.elasticsearch.common.util.ObjectArray; import org.elasticsearch.common.util.ObjectArrayPriorityQueue; @@ -43,6 +44,7 @@ import java.io.IOException; import java.util.Arrays; +import java.util.Comparator; import java.util.Map; import java.util.function.BiConsumer; import java.util.function.Function; @@ -287,40 +289,55 @@ private InternalAggregation[] buildAggregations(LongArray owningBucketOrds) thro LongArray otherDocCounts = bigArrays().newLongArray(owningBucketOrds.size(), true); ObjectArray topBucketsPerOrd = buildTopBucketsPerOrd(Math.toIntExact(owningBucketOrds.size())) ) { - for (long ordIdx = 0; ordIdx < topBucketsPerOrd.size(); ordIdx++) { - long owningOrd = owningBucketOrds.get(ordIdx); - collectZeroDocEntriesIfNeeded(owningOrd, excludeDeletedDocs); - int size = (int) Math.min(bucketOrds.size(), bucketCountThresholds.getShardSize()); - - try (ObjectArrayPriorityQueue ordered = buildPriorityQueue(size)) { - B spare = null; - BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningOrd); - BucketUpdater bucketUpdater = bucketUpdater(owningOrd); - while (ordsEnum.next()) { - long docCount = bucketDocCount(ordsEnum.ord()); - otherDocCounts.increment(ordIdx, docCount); - if (docCount < bucketCountThresholds.getShardMinDocCount()) { - continue; - } - if (spare == null) { - checkRealMemoryCBForInternalBucket(); - spare = buildEmptyBucket(); + try (IntArray bucketsToCollect = bigArrays().newIntArray(owningBucketOrds.size())) { + long ordsToCollect = 0; + for (long ordIdx = 0; ordIdx < owningBucketOrds.size(); ordIdx++) { + final long owningBucketOrd = owningBucketOrds.get(ordIdx); + collectZeroDocEntriesIfNeeded(owningBucketOrd, excludeDeletedDocs); + final int size = (int) Math.min(bucketOrds.bucketsInOrd(owningBucketOrd), bucketCountThresholds.getShardSize()); + ordsToCollect += size; + bucketsToCollect.set(ordIdx, size); + } + try (LongArray ordsArray = bigArrays().newLongArray(ordsToCollect)) { + long ordsCollected = 0; + for (long ordIdx = 0; ordIdx < topBucketsPerOrd.size(); ordIdx++) { + long owningOrd = owningBucketOrds.get(ordIdx); + try (ObjectArrayPriorityQueue> ordered = buildPriorityQueue(bucketsToCollect.get(ordIdx))) { + BucketAndOrd spare = null; + BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningOrd); + BucketUpdater bucketUpdater = bucketUpdater(owningOrd); + while (ordsEnum.next()) { + long docCount = bucketDocCount(ordsEnum.ord()); + otherDocCounts.increment(ordIdx, docCount); + if (docCount < bucketCountThresholds.getShardMinDocCount()) { + continue; + } + if (spare == null) { + checkRealMemoryCBForInternalBucket(); + spare = new BucketAndOrd<>(buildEmptyBucket()); + } + bucketUpdater.updateBucket(spare.bucket, ordsEnum, docCount); + spare.ord = ordsEnum.ord(); + spare = ordered.insertWithOverflow(spare); + } + + final int orderedSize = (int) ordered.size(); + final B[] buckets = buildBuckets(orderedSize); + for (int i = orderedSize - 1; i >= 0; --i) { + BucketAndOrd bucketAndOrd = ordered.pop(); + finalizeBucket(bucketAndOrd.bucket); + buckets[i] = bucketAndOrd.bucket; + ordsArray.set(ordsCollected + i, bucketAndOrd.ord); + otherDocCounts.increment(ordIdx, 
-bucketAndOrd.bucket.getDocCount()); + } + topBucketsPerOrd.set(ordIdx, buckets); + ordsCollected += orderedSize; } - bucketUpdater.updateBucket(spare, ordsEnum, docCount); - spare = ordered.insertWithOverflow(spare); - } - - topBucketsPerOrd.set(ordIdx, buildBuckets((int) ordered.size())); - for (int i = (int) ordered.size() - 1; i >= 0; --i) { - topBucketsPerOrd.get(ordIdx)[i] = ordered.pop(); - otherDocCounts.increment(ordIdx, -topBucketsPerOrd.get(ordIdx)[i].getDocCount()); - finalizeBucket(topBucketsPerOrd.get(ordIdx)[i]); } + assert ordsCollected == ordsArray.size(); + buildSubAggs(topBucketsPerOrd, ordsArray); } } - - buildSubAggs(topBucketsPerOrd); - return MapStringTermsAggregator.this.buildAggregations( Math.toIntExact(owningBucketOrds.size()), ordIdx -> buildResult(owningBucketOrds.get(ordIdx), otherDocCounts.get(ordIdx), topBucketsPerOrd.get(ordIdx)) @@ -355,7 +372,7 @@ private InternalAggregation[] buildAggregations(LongArray owningBucketOrds) thro * Build a {@link PriorityQueue} to sort the buckets. After we've * collected all of the buckets we'll collect all entries in the queue. */ - abstract ObjectArrayPriorityQueue buildPriorityQueue(int size); + abstract ObjectArrayPriorityQueue> buildPriorityQueue(int size); /** * Update fields in {@code spare} to reflect information collected for @@ -382,9 +399,9 @@ private InternalAggregation[] buildAggregations(LongArray owningBucketOrds) thro /** * Build the sub-aggregations into the buckets. This will usually - * delegate to {@link #buildSubAggsForAllBuckets}. + * delegate to {@link #buildSubAggsForAllBuckets(ObjectArray, LongArray, BiConsumer)}. */ - abstract void buildSubAggs(ObjectArray topBucketsPerOrd) throws IOException; + abstract void buildSubAggs(ObjectArray topBucketsPerOrd, LongArray ordsArray) throws IOException; /** * Turn the buckets into an aggregation result. 
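One detail worth calling out in all of these rewritten loops: the queue keeps its least competitive entry on top (BucketPriorityQueue reverses the comparator on purpose, per its own comment), so buckets pop weakest-first and the result arrays are filled back-to-front; meanwhile otherDocCounts adds every enumerated bucket's count and then subtracts the winners', leaving exactly the documents that fell outside the top-N. A compact JDK stand-in for that bookkeeping:

    import java.util.Comparator;
    import java.util.PriorityQueue;
    import java.util.function.ToLongFunction;

    final class BoundedTopN {

        /** out.length must be min(n, candidate count); out[0] ends up holding the best entry. */
        static <B> long topNDescending(Iterable<B> candidates, int n, Comparator<B> bestFirst,
                                       ToLongFunction<B> docCount, B[] out) {
            long otherDocCount = 0;
            // reversed comparator: the heap's head is the weakest entry kept so far
            PriorityQueue<B> queue = new PriorityQueue<>(bestFirst.reversed());
            for (B candidate : candidates) {
                otherDocCount += docCount.applyAsLong(candidate); // count everything first
                queue.offer(candidate);
                if (queue.size() > n) {
                    queue.poll(); // evict the weakest, like insertWithOverflow
                }
            }
            for (int i = queue.size() - 1; i >= 0; --i) {
                B bucket = queue.poll();                       // weakest survivor first...
                out[i] = bucket;                               // ...so fill the array back-to-front
                otherDocCount -= docCount.applyAsLong(bucket); // winners are not "other"
            }
            return otherDocCount;
        }
    }

The back-to-front write restores best-first order without an extra sort, which is why every loop in the diff iterates from orderedSize - 1 down to 0.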
@@ -407,9 +424,11 @@ interface BucketUpdater */ class StandardTermsResults extends ResultStrategy { private final ValuesSource valuesSource; + private final Comparator> comparator; - StandardTermsResults(ValuesSource valuesSource) { + StandardTermsResults(ValuesSource valuesSource, Aggregator aggregator) { this.valuesSource = valuesSource; + this.comparator = order.partiallyBuiltBucketComparator(aggregator); } @Override @@ -498,8 +517,8 @@ StringTerms.Bucket buildEmptyBucket() { } @Override - ObjectArrayPriorityQueue buildPriorityQueue(int size) { - return new BucketPriorityQueue<>(size, bigArrays(), partiallyBuiltBucketComparator); + ObjectArrayPriorityQueue> buildPriorityQueue(int size) { + return new BucketPriorityQueue<>(size, bigArrays(), comparator); } @Override @@ -507,7 +526,6 @@ BucketUpdater bucketUpdater(long owningBucketOrd) { return (spare, ordsEnum, docCount) -> { ordsEnum.readValue(spare.termBytes); spare.docCount = docCount; - spare.bucketOrd = ordsEnum.ord(); }; } @@ -532,8 +550,8 @@ void finalizeBucket(StringTerms.Bucket bucket) { } @Override - void buildSubAggs(ObjectArray topBucketsPerOrd) throws IOException { - buildSubAggsForAllBuckets(topBucketsPerOrd, b -> b.bucketOrd, (b, a) -> b.aggregations = a); + void buildSubAggs(ObjectArray topBucketsPerOrd, LongArray ordArray) throws IOException { + buildSubAggsForAllBuckets(topBucketsPerOrd, ordArray, (b, a) -> b.aggregations = a); } @Override @@ -625,7 +643,7 @@ SignificantStringTerms.Bucket buildEmptyBucket() { } @Override - ObjectArrayPriorityQueue buildPriorityQueue(int size) { + ObjectArrayPriorityQueue> buildPriorityQueue(int size) { return new BucketSignificancePriorityQueue<>(size, bigArrays()); } @@ -634,7 +652,6 @@ BucketUpdater bucketUpdater(long owningBucketOrd) long subsetSize = subsetSizes.get(owningBucketOrd); return (spare, ordsEnum, docCount) -> { ordsEnum.readValue(spare.termBytes); - spare.bucketOrd = ordsEnum.ord(); spare.subsetDf = docCount; spare.supersetDf = backgroundFrequencies.freq(spare.termBytes); /* @@ -667,8 +684,8 @@ void finalizeBucket(SignificantStringTerms.Bucket bucket) { } @Override - void buildSubAggs(ObjectArray topBucketsPerOrd) throws IOException { - buildSubAggsForAllBuckets(topBucketsPerOrd, b -> b.bucketOrd, (b, a) -> b.aggregations = a); + void buildSubAggs(ObjectArray topBucketsPerOrd, LongArray ordsArray) throws IOException { + buildSubAggsForAllBuckets(topBucketsPerOrd, ordsArray, (b, a) -> b.aggregations = a); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregator.java index 5d4c15d8a3b8..a54053f712f8 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregator.java @@ -14,6 +14,7 @@ import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.util.NumericUtils; +import org.elasticsearch.common.util.IntArray; import org.elasticsearch.common.util.LongArray; import org.elasticsearch.common.util.ObjectArray; import org.elasticsearch.common.util.ObjectArrayPriorityQueue; @@ -40,6 +41,7 @@ import java.io.IOException; import java.util.Arrays; +import java.util.Comparator; import java.util.Map; import java.util.function.BiConsumer; import java.util.function.Function; @@ -167,42 +169,56 @@ private 
InternalAggregation[] buildAggregations(LongArray owningBucketOrds) thro LongArray otherDocCounts = bigArrays().newLongArray(owningBucketOrds.size(), true); ObjectArray topBucketsPerOrd = buildTopBucketsPerOrd(owningBucketOrds.size()) ) { - for (long ordIdx = 0; ordIdx < topBucketsPerOrd.size(); ordIdx++) { - final long owningBucketOrd = owningBucketOrds.get(ordIdx); - collectZeroDocEntriesIfNeeded(owningBucketOrd, excludeDeletedDocs); - long bucketsInOrd = bucketOrds.bucketsInOrd(owningBucketOrd); - - int size = (int) Math.min(bucketsInOrd, bucketCountThresholds.getShardSize()); - try (ObjectArrayPriorityQueue ordered = buildPriorityQueue(size)) { - B spare = null; - BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrd); - BucketUpdater bucketUpdater = bucketUpdater(owningBucketOrd); - while (ordsEnum.next()) { - long docCount = bucketDocCount(ordsEnum.ord()); - otherDocCounts.increment(ordIdx, docCount); - if (docCount < bucketCountThresholds.getShardMinDocCount()) { - continue; - } - if (spare == null) { - checkRealMemoryCBForInternalBucket(); - spare = buildEmptyBucket(); - } - bucketUpdater.updateBucket(spare, ordsEnum, docCount); - spare = ordered.insertWithOverflow(spare); - } + try (IntArray bucketsToCollect = bigArrays().newIntArray(owningBucketOrds.size())) { + long ordsToCollect = 0; + for (long ordIdx = 0; ordIdx < owningBucketOrds.size(); ordIdx++) { + final long owningBucketOrd = owningBucketOrds.get(ordIdx); + collectZeroDocEntriesIfNeeded(owningBucketOrd, excludeDeletedDocs); + int size = (int) Math.min(bucketOrds.bucketsInOrd(owningBucketOrd), bucketCountThresholds.getShardSize()); + bucketsToCollect.set(ordIdx, size); + ordsToCollect += size; + } + try (LongArray ordsArray = bigArrays().newLongArray(ordsToCollect)) { + long ordsCollected = 0; + for (long ordIdx = 0; ordIdx < topBucketsPerOrd.size(); ordIdx++) { + final long owningBucketOrd = owningBucketOrds.get(ordIdx); + try (ObjectArrayPriorityQueue> ordered = buildPriorityQueue(bucketsToCollect.get(ordIdx))) { + BucketAndOrd spare = null; + BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrd); + BucketUpdater bucketUpdater = bucketUpdater(owningBucketOrd); + while (ordsEnum.next()) { + long docCount = bucketDocCount(ordsEnum.ord()); + otherDocCounts.increment(ordIdx, docCount); + if (docCount < bucketCountThresholds.getShardMinDocCount()) { + continue; + } + if (spare == null) { + checkRealMemoryCBForInternalBucket(); + spare = new BucketAndOrd<>(buildEmptyBucket()); + } + bucketUpdater.updateBucket(spare.bucket, ordsEnum, docCount); + spare.ord = ordsEnum.ord(); + spare = ordered.insertWithOverflow(spare); + } + + // Get the top buckets + final int orderedSize = (int) ordered.size(); + final B[] bucketsForOrd = buildBuckets(orderedSize); + for (int b = orderedSize - 1; b >= 0; --b) { + BucketAndOrd bucketAndOrd = ordered.pop(); + bucketsForOrd[b] = bucketAndOrd.bucket; + ordsArray.set(ordsCollected + b, bucketAndOrd.ord); + otherDocCounts.increment(ordIdx, -bucketAndOrd.bucket.getDocCount()); + } + topBucketsPerOrd.set(ordIdx, bucketsForOrd); + ordsCollected += orderedSize; - // Get the top buckets - B[] bucketsForOrd = buildBuckets((int) ordered.size()); - topBucketsPerOrd.set(ordIdx, bucketsForOrd); - for (int b = (int) ordered.size() - 1; b >= 0; --b) { - topBucketsPerOrd.get(ordIdx)[b] = ordered.pop(); - otherDocCounts.increment(ordIdx, -topBucketsPerOrd.get(ordIdx)[b].getDocCount()); + } } + assert ordsCollected == ordsArray.size(); + buildSubAggs(topBucketsPerOrd, ordsArray); } } - - 
buildSubAggs(topBucketsPerOrd); - return NumericTermsAggregator.this.buildAggregations( Math.toIntExact(owningBucketOrds.size()), ordIdx -> buildResult(owningBucketOrds.get(ordIdx), otherDocCounts.get(ordIdx), topBucketsPerOrd.get(ordIdx)) @@ -254,13 +270,13 @@ private InternalAggregation[] buildAggregations(LongArray owningBucketOrds) thro * Build a {@link ObjectArrayPriorityQueue} to sort the buckets. After we've * collected all of the buckets we'll collect all entries in the queue. */ - abstract ObjectArrayPriorityQueue buildPriorityQueue(int size); + abstract ObjectArrayPriorityQueue> buildPriorityQueue(int size); /** * Build the sub-aggregations into the buckets. This will usually - * delegate to {@link #buildSubAggsForAllBuckets}. + * delegate to {@link #buildSubAggsForAllBuckets(ObjectArray, LongArray, BiConsumer)}. */ - abstract void buildSubAggs(ObjectArray topBucketsPerOrd) throws IOException; + abstract void buildSubAggs(ObjectArray topBucketsPerOrd, LongArray ordsArray) throws IOException; /** * Collect extra entries for "zero" hit documents if they were requested @@ -287,9 +303,11 @@ interface BucketUpdater abstract class StandardTermsResultStrategy, B extends InternalTerms.Bucket> extends ResultStrategy { protected final boolean showTermDocCountError; + private final Comparator> comparator; - StandardTermsResultStrategy(boolean showTermDocCountError) { + StandardTermsResultStrategy(boolean showTermDocCountError, Aggregator aggregator) { this.showTermDocCountError = showTermDocCountError; + this.comparator = order.partiallyBuiltBucketComparator(aggregator); } @Override @@ -298,13 +316,13 @@ final LeafBucketCollector wrapCollector(LeafBucketCollector primary) { } @Override - final ObjectArrayPriorityQueue buildPriorityQueue(int size) { - return new BucketPriorityQueue<>(size, bigArrays(), partiallyBuiltBucketComparator); + final ObjectArrayPriorityQueue> buildPriorityQueue(int size) { + return new BucketPriorityQueue<>(size, bigArrays(), comparator); } @Override - final void buildSubAggs(ObjectArray topBucketsPerOrd) throws IOException { - buildSubAggsForAllBuckets(topBucketsPerOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs); + final void buildSubAggs(ObjectArray topBucketsPerOrd, LongArray ordsArray) throws IOException { + buildSubAggsForAllBuckets(topBucketsPerOrd, ordsArray, (b, aggs) -> b.aggregations = aggs); } @Override @@ -340,8 +358,8 @@ public final void close() {} } class LongTermsResults extends StandardTermsResultStrategy { - LongTermsResults(boolean showTermDocCountError) { - super(showTermDocCountError); + LongTermsResults(boolean showTermDocCountError, Aggregator aggregator) { + super(showTermDocCountError, aggregator); } @Override @@ -374,7 +392,6 @@ BucketUpdater bucketUpdater(long owningBucketOrd) { return (LongTerms.Bucket spare, BucketOrdsEnum ordsEnum, long docCount) -> { spare.term = ordsEnum.value(); spare.docCount = docCount; - spare.bucketOrd = ordsEnum.ord(); }; } @@ -424,8 +441,8 @@ LongTerms buildEmptyResult() { class DoubleTermsResults extends StandardTermsResultStrategy { - DoubleTermsResults(boolean showTermDocCountError) { - super(showTermDocCountError); + DoubleTermsResults(boolean showTermDocCountError, Aggregator aggregator) { + super(showTermDocCountError, aggregator); } @Override @@ -458,7 +475,6 @@ BucketUpdater bucketUpdater(long owningBucketOrd) { return (DoubleTerms.Bucket spare, BucketOrdsEnum ordsEnum, long docCount) -> { spare.term = NumericUtils.sortableLongToDouble(ordsEnum.value()); spare.docCount = docCount; - 
spare.bucketOrd = ordsEnum.ord(); }; } @@ -575,7 +591,6 @@ BucketUpdater bucketUpdater(long owningBucketOrd) { spare.term = ordsEnum.value(); spare.subsetDf = docCount; spare.supersetDf = backgroundFrequencies.freq(spare.term); - spare.bucketOrd = ordsEnum.ord(); // During shard-local down-selection we use subset/superset stats that are for this shard only // Back at the central reducer these properties will be updated with global stats spare.updateScore(significanceHeuristic, subsetSize, supersetSize); @@ -583,13 +598,13 @@ BucketUpdater bucketUpdater(long owningBucketOrd) { } @Override - ObjectArrayPriorityQueue buildPriorityQueue(int size) { + ObjectArrayPriorityQueue> buildPriorityQueue(int size) { return new BucketSignificancePriorityQueue<>(size, bigArrays()); } @Override - void buildSubAggs(ObjectArray topBucketsPerOrd) throws IOException { - buildSubAggsForAllBuckets(topBucketsPerOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs); + void buildSubAggs(ObjectArray topBucketsPerOrd, LongArray ordsArray) throws IOException { + buildSubAggsForAllBuckets(topBucketsPerOrd, ordsArray, (b, aggs) -> b.aggregations = aggs); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregator.java index 4922be7cec1b..c07c0726a4ae 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregator.java @@ -27,7 +27,6 @@ import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; -import java.util.Comparator; import java.util.HashSet; import java.util.Map; import java.util.Objects; @@ -190,7 +189,6 @@ public boolean equals(Object obj) { protected final DocValueFormat format; protected final BucketCountThresholds bucketCountThresholds; protected final BucketOrder order; - protected final Comparator> partiallyBuiltBucketComparator; protected final Set aggsUsedForSorting; protected final SubAggCollectionMode collectMode; @@ -209,7 +207,9 @@ public TermsAggregator( super(name, factories, context, parent, metadata); this.bucketCountThresholds = bucketCountThresholds; this.order = order; - partiallyBuiltBucketComparator = order == null ? 
null : order.partiallyBuiltBucketComparator(b -> b.bucketOrd, this); + if (order != null) { + order.validate(this); + } this.format = format; if ((subAggsNeedScore() && descendsFromNestedAggregator(parent)) || context.isInSortOrderExecutionRequired()) { /** diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java index 2c7b768fcdbb..da5ae37b0822 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java @@ -195,12 +195,12 @@ private static TermsAggregatorSupplier numericSupplier() { if (includeExclude != null) { longFilter = includeExclude.convertToDoubleFilter(); } - resultStrategy = agg -> agg.new DoubleTermsResults(showTermDocCountError); + resultStrategy = agg -> agg.new DoubleTermsResults(showTermDocCountError, agg); } else { if (includeExclude != null) { longFilter = includeExclude.convertToLongFilter(valuesSourceConfig.format()); } - resultStrategy = agg -> agg.new LongTermsResults(showTermDocCountError); + resultStrategy = agg -> agg.new LongTermsResults(showTermDocCountError, agg); } return new NumericTermsAggregator( name, @@ -403,7 +403,7 @@ Aggregator create( name, factories, new MapStringTermsAggregator.ValuesSourceCollectorSource(valuesSourceConfig), - a -> a.new StandardTermsResults(valuesSourceConfig.getValuesSource()), + a -> a.new StandardTermsResults(valuesSourceConfig.getValuesSource(), a), order, valuesSourceConfig.format(), bucketCountThresholds, diff --git a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index 098a2b2f45d2..3554a6dc08b9 100644 --- a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -19,7 +19,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Booleans; @@ -92,7 +91,6 @@ * @see SearchRequest#source(SearchSourceBuilder) */ public final class SearchSourceBuilder implements Writeable, ToXContentObject, Rewriteable { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(SearchSourceBuilder.class); public static final ParseField FROM_FIELD = new ParseField("from"); public static final ParseField SIZE_FIELD = new ParseField("size"); diff --git a/server/src/main/java/org/elasticsearch/search/lookup/SearchLookup.java b/server/src/main/java/org/elasticsearch/search/lookup/SearchLookup.java index f7f8cee30ee1..9eb0170af5ef 100644 --- a/server/src/main/java/org/elasticsearch/search/lookup/SearchLookup.java +++ b/server/src/main/java/org/elasticsearch/search/lookup/SearchLookup.java @@ -102,6 +102,14 @@ private SearchLookup(SearchLookup searchLookup, Set fieldChain) { this.fieldLookupProvider = searchLookup.fieldLookupProvider; } + private SearchLookup(SearchLookup searchLookup, SourceProvider sourceProvider, Set fieldChain) { + this.fieldChain = 
Collections.unmodifiableSet(fieldChain); + this.sourceProvider = sourceProvider; + this.fieldTypeLookup = searchLookup.fieldTypeLookup; + this.fieldDataLookup = searchLookup.fieldDataLookup; + this.fieldLookupProvider = searchLookup.fieldLookupProvider; + } + /** * Creates a copy of the current {@link SearchLookup} that looks fields up in the same way, but also tracks field references * in order to detect cycles and prevent resolving fields that depend on more than {@link #MAX_FIELD_CHAIN_DEPTH} other fields. @@ -144,4 +152,8 @@ public IndexFieldData getForField(MappedFieldType fieldType, MappedFieldType. public Source getSource(LeafReaderContext ctx, int doc) throws IOException { return sourceProvider.getSource(ctx, doc); } + + public SearchLookup swapSourceProvider(SourceProvider sourceProvider) { + return new SearchLookup(this, sourceProvider, fieldChain); + } } diff --git a/server/src/main/java/org/elasticsearch/search/rank/RankDoc.java b/server/src/main/java/org/elasticsearch/search/rank/RankDoc.java index 9ab14aa9362b..d4127836a4e4 100644 --- a/server/src/main/java/org/elasticsearch/search/rank/RankDoc.java +++ b/server/src/main/java/org/elasticsearch/search/rank/RankDoc.java @@ -44,7 +44,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.RANK_DOCS_RETRIEVER; + return TransportVersions.V_8_16_0; } @Override diff --git a/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java b/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java index db839de9f573..2ab6395db73b 100644 --- a/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java @@ -20,6 +20,7 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.TransportMultiSearchAction; +import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.rest.RestStatus; @@ -46,6 +47,8 @@ */ public abstract class CompoundRetrieverBuilder> extends RetrieverBuilder { + public static final NodeFeature INNER_RETRIEVERS_FILTER_SUPPORT = new NodeFeature("inner_retrievers_filter_support"); + public record RetrieverSource(RetrieverBuilder retriever, SearchSourceBuilder source) {} protected final int rankWindowSize; @@ -64,9 +67,9 @@ public T addChild(RetrieverBuilder retrieverBuilder) { /** * Returns a clone of the original retriever, replacing the sub-retrievers with - * the provided {@code newChildRetrievers}. + * the provided {@code newChildRetrievers} and the filters with the {@code newPreFilterQueryBuilders}. */ - protected abstract T clone(List newChildRetrievers); + protected abstract T clone(List newChildRetrievers, List newPreFilterQueryBuilders); /** * Combines the provided {@code rankResults} to return the final top documents. 
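The hunk that follows reworks how a compound retriever handles its prefilters during rewrite: if rewriting changed the filters it clones itself immediately, so the next rewrite round sees the final filters, and it copies filters down only into compound children, since leaf children pick them up later through createSearchSourceBuilder. A toy model of that control flow, with hypothetical Retriever and clone types; the real signatures are in the diff below.

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Optional;
    import java.util.function.BiFunction;

    final class CompoundRewriteSketch {

        interface Retriever {
            boolean isCompound();
            List<String> preFilters(); // mutable: parents push filters down into compound children
            Retriever rewrite();       // must return `this` when nothing changed
        }

        /** Empty result means "no structural change": the caller proceeds to execute searches. */
        static Optional<Retriever> rewriteCompound(List<Retriever> children,
                                                   List<String> filters,
                                                   List<String> rewrittenFilters,
                                                   BiFunction<List<Retriever>, List<String>, Retriever> clone) {
            if (!rewrittenFilters.equals(filters)) {
                // filters changed: clone eagerly so children see the final filters next round
                return Optional.of(clone.apply(children, rewrittenFilters));
            }
            boolean changed = false;
            List<Retriever> newChildren = new ArrayList<>(children.size());
            for (Retriever child : children) {
                if (child.isCompound() && !filters.isEmpty()) {
                    child.preFilters().addAll(filters); // propagate to compound children only
                }
                Retriever rewritten = child.rewrite();
                changed |= rewritten != child;
                newChildren.add(rewritten);
            }
            return changed ? Optional.of(clone.apply(newChildren, filters)) : Optional.empty();
        }
    }

Returning a clone as soon as the filters change is what keeps each rewrite round idempotent, which is also why the compound-child check guards against re-adding the same filters on re-entry.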
@@ -85,13 +88,25 @@ public final RetrieverBuilder rewrite(QueryRewriteContext ctx) throws IOExceptio } // Rewrite prefilters - boolean hasChanged = false; + // We eagerly rewrite prefilters, because some of the innerRetrievers + // could be compound too, so we want to propagate all the necessary filter information to them + // and have it available as part of their own rewrite step var newPreFilters = rewritePreFilters(ctx); - hasChanged |= newPreFilters != preFilterQueryBuilders; + if (newPreFilters != preFilterQueryBuilders) { + return clone(innerRetrievers, newPreFilters); + } + boolean hasChanged = false; // Rewrite retriever sources List<RetrieverSource> newRetrievers = new ArrayList<>(); for (var entry : innerRetrievers) { + // we propagate the filters only for compound retrievers as they won't be attached through + // the createSearchSourceBuilder. + // We could remove this check, but we would end up adding the same filters + // multiple times in case an inner retriever rewrites itself, when we re-enter to rewrite + if (entry.retriever.isCompound() && false == preFilterQueryBuilders.isEmpty()) { + entry.retriever.getPreFilterQueryBuilders().addAll(preFilterQueryBuilders); + } RetrieverBuilder newRetriever = entry.retriever.rewrite(ctx); if (newRetriever != entry.retriever) { newRetrievers.add(new RetrieverSource(newRetriever, null)); @@ -106,7 +121,7 @@ public final RetrieverBuilder rewrite(QueryRewriteContext ctx) throws IOExceptio } } if (hasChanged) { - return clone(newRetrievers); + return clone(newRetrievers, newPreFilters); } // execute searches @@ -166,12 +181,7 @@ public void onFailure(Exception e) { }); }); - return new RankDocsRetrieverBuilder( - rankWindowSize, - newRetrievers.stream().map(s -> s.retriever).toList(), - results::get, - newPreFilters - ); + return new RankDocsRetrieverBuilder(rankWindowSize, newRetrievers.stream().map(s -> s.retriever).toList(), results::get); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/retriever/KnnRetrieverBuilder.java b/server/src/main/java/org/elasticsearch/search/retriever/KnnRetrieverBuilder.java index 8be9a78dae15..f1464c41ca3b 100644 --- a/server/src/main/java/org/elasticsearch/search/retriever/KnnRetrieverBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/retriever/KnnRetrieverBuilder.java @@ -184,8 +184,7 @@ public RetrieverBuilder rewrite(QueryRewriteContext ctx) throws IOException { ll.onResponse(null); })); }); - var rewritten = new KnnRetrieverBuilder(this, () -> toSet.get(), null); - return rewritten; + return new KnnRetrieverBuilder(this, () -> toSet.get(), null); } return super.rewrite(ctx); } diff --git a/server/src/main/java/org/elasticsearch/search/retriever/RankDocsRetrieverBuilder.java b/server/src/main/java/org/elasticsearch/search/retriever/RankDocsRetrieverBuilder.java index 02f890f51d01..4d3f3fefd446 100644 --- a/server/src/main/java/org/elasticsearch/search/retriever/RankDocsRetrieverBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/retriever/RankDocsRetrieverBuilder.java @@ -33,19 +33,13 @@ public class RankDocsRetrieverBuilder extends RetrieverBuilder { final List<RetrieverBuilder> sources; final Supplier<RankDoc[]> rankDocs; - public RankDocsRetrieverBuilder( - int rankWindowSize, - List<RetrieverBuilder> sources, - Supplier<RankDoc[]> rankDocs, - List<QueryBuilder> preFilterQueryBuilders - ) { + public RankDocsRetrieverBuilder(int rankWindowSize, List<RetrieverBuilder> sources, Supplier<RankDoc[]> rankDocs) { this.rankWindowSize = rankWindowSize; this.rankDocs = rankDocs; if (sources == null || sources.isEmpty()) { throw new IllegalArgumentException("sources must not be
null or empty"); } this.sources = sources; - this.preFilterQueryBuilders = preFilterQueryBuilders; } @Override @@ -73,10 +67,6 @@ private boolean sourceShouldRewrite(QueryRewriteContext ctx) throws IOException @Override public RetrieverBuilder rewrite(QueryRewriteContext ctx) throws IOException { assert false == sourceShouldRewrite(ctx) : "retriever sources should be rewritten first"; - var rewrittenFilters = rewritePreFilters(ctx); - if (rewrittenFilters != preFilterQueryBuilders) { - return new RankDocsRetrieverBuilder(rankWindowSize, sources, rankDocs, rewrittenFilters); - } return this; } @@ -94,7 +84,7 @@ public QueryBuilder topDocsQuery() { boolQuery.should(query); } } - // ignore prefilters of this level, they are already propagated to children + // ignore prefilters of this level, they were already propagated to children return boolQuery; } @@ -133,7 +123,7 @@ public void extractToSearchSourceBuilder(SearchSourceBuilder searchSourceBuilder } else { rankQuery = new RankDocsQueryBuilder(rankDocResults, null, false); } - // ignore prefilters of this level, they are already propagated to children + // ignore prefilters of this level, they were already propagated to children searchSourceBuilder.query(rankQuery); if (sourceHasMinScore()) { searchSourceBuilder.minScore(this.minScore() == null ? Float.MIN_VALUE : this.minScore()); diff --git a/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java index 6640f0f85840..2aaade35fb8f 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java @@ -28,7 +28,6 @@ import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.fielddata.FieldData; @@ -67,7 +66,6 @@ * A geo distance based sorting on a geo point like field. 
*/ public class GeoDistanceSortBuilder extends SortBuilder<GeoDistanceSortBuilder> { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(GeoDistanceSortBuilder.class); public static final String NAME = "_geo_distance"; public static final String ALTERNATIVE_NAME = "_geoDistance"; diff --git a/server/src/main/java/org/elasticsearch/search/vectors/ExactKnnQueryBuilder.java b/server/src/main/java/org/elasticsearch/search/vectors/ExactKnnQueryBuilder.java index c8670a8dfeec..77d708432cf2 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/ExactKnnQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/ExactKnnQueryBuilder.java @@ -55,8 +55,7 @@ public ExactKnnQueryBuilder(StreamInput in) throws IOException { this.query = VectorData.fromFloats(in.readFloatArray()); } this.field = in.readString(); - if (in.getTransportVersion().onOrAfter(TransportVersions.FIX_VECTOR_SIMILARITY_INNER_HITS) - || in.getTransportVersion().isPatchFrom(TransportVersions.V_8_15_0)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { this.vectorSimilarity = in.readOptionalFloat(); } else { this.vectorSimilarity = null; @@ -88,8 +87,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { out.writeFloatArray(query.asFloatVector()); } out.writeString(field); - if (out.getTransportVersion().onOrAfter(TransportVersions.FIX_VECTOR_SIMILARITY_INNER_HITS) - || out.getTransportVersion().isPatchFrom(TransportVersions.V_8_15_0)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { out.writeOptionalFloat(vectorSimilarity); } } diff --git a/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilder.java b/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilder.java index f52addefc8b1..b5ba97906f0e 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilder.java @@ -71,8 +71,7 @@ public KnnScoreDocQueryBuilder(StreamInput in) throws IOException { this.fieldName = null; this.queryVector = null; } - if (in.getTransportVersion().onOrAfter(TransportVersions.FIX_VECTOR_SIMILARITY_INNER_HITS) - || in.getTransportVersion().isPatchFrom(TransportVersions.V_8_15_0)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { this.vectorSimilarity = in.readOptionalFloat(); } else { this.vectorSimilarity = null; @@ -116,8 +115,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { out.writeBoolean(false); } } - if (out.getTransportVersion().onOrAfter(TransportVersions.FIX_VECTOR_SIMILARITY_INNER_HITS) - || out.getTransportVersion().isPatchFrom(TransportVersions.V_8_15_0)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { out.writeOptionalFloat(vectorSimilarity); } } diff --git a/server/src/main/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilder.java b/server/src/main/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilder.java index deb7e6bd035b..5dd2cbf32dd1 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilder.java @@ -481,10 +481,9 @@ protected Query doToQuery(SearchExecutionContext context) throws IOException { } parentBitSet = context.bitsetFilter(parentFilter); if (filterQuery != null) { - NestedHelper nestedHelper = new NestedHelper(context.nestedLookup(), context::isFieldMapped); // We
treat the provided filter as a filter over PARENT documents, so if it might match nested documents // we need to adjust it. - if (nestedHelper.mightMatchNestedDocs(filterQuery)) { + if (NestedHelper.mightMatchNestedDocs(filterQuery, context)) { // Ensure that the query only returns parent documents matching `filterQuery` filterQuery = Queries.filtered(filterQuery, parentFilter); } diff --git a/server/src/main/java/org/elasticsearch/snapshots/RegisteredPolicySnapshots.java b/server/src/main/java/org/elasticsearch/snapshots/RegisteredPolicySnapshots.java index f34b87669747..231894875b7f 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/RegisteredPolicySnapshots.java +++ b/server/src/main/java/org/elasticsearch/snapshots/RegisteredPolicySnapshots.java @@ -101,7 +101,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.REGISTER_SLM_STATS; + return TransportVersions.V_8_16_0; } @Override @@ -171,7 +171,7 @@ public void writeTo(StreamOutput out) throws IOException { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.REGISTER_SLM_STATS; + return TransportVersions.V_8_16_0; } } diff --git a/server/src/test/java/org/elasticsearch/TransportVersionTests.java b/server/src/test/java/org/elasticsearch/TransportVersionTests.java index 6c2cc5c1f4cc..08b12cec2e17 100644 --- a/server/src/test/java/org/elasticsearch/TransportVersionTests.java +++ b/server/src/test/java/org/elasticsearch/TransportVersionTests.java @@ -211,7 +211,7 @@ public void testDenseTransportVersions() { Set missingVersions = new TreeSet<>(); TransportVersion previous = null; for (var tv : TransportVersions.getAllVersions()) { - if (tv.before(TransportVersions.V_8_15_2)) { + if (tv.before(TransportVersions.V_8_16_0)) { continue; } if (previous == null) { diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsRequestParametersTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsRequestParametersTests.java index f37b1d1b4171..cfdbfdfbfcf8 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsRequestParametersTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsRequestParametersTests.java @@ -23,7 +23,7 @@ public class NodesStatsRequestParametersTests extends ESTestCase { public void testReadWriteMetricSet() { - for (var version : List.of(TransportVersions.V_8_15_0, TransportVersions.NODES_STATS_ENUM_SET)) { + for (var version : List.of(TransportVersions.V_8_15_0, TransportVersions.V_8_16_0)) { var randSet = randomSubsetOf(Metric.ALL); var metricsOut = randSet.isEmpty() ? EnumSet.noneOf(Metric.class) : EnumSet.copyOf(randSet); try { diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/SearchUsageStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/SearchUsageStatsTests.java index 89ccd4ab63d7..46b757407e6a 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/SearchUsageStatsTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/SearchUsageStatsTests.java @@ -199,7 +199,7 @@ public void testSerializationBWC() throws IOException { randomQueryUsage(QUERY_TYPES.size()), version.onOrAfter(TransportVersions.V_8_12_0) ? 
randomRescorerUsage(RESCORER_TYPES.size()) : Map.of(), randomSectionsUsage(SECTIONS.size()), - version.onOrAfter(TransportVersions.RETRIEVERS_TELEMETRY_ADDED) ? randomRetrieversUsage(RETRIEVERS.size()) : Map.of(), + version.onOrAfter(TransportVersions.V_8_16_0) ? randomRetrieversUsage(RETRIEVERS.size()) : Map.of(), randomLongBetween(0, Long.MAX_VALUE) ); assertSerialization(testInstance, version); diff --git a/server/src/test/java/org/elasticsearch/bootstrap/PluginsResolverTests.java b/server/src/test/java/org/elasticsearch/bootstrap/PluginsResolverTests.java new file mode 100644 index 000000000000..798b576500d7 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/bootstrap/PluginsResolverTests.java @@ -0,0 +1,257 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.bootstrap; + +import org.elasticsearch.plugins.PluginBundle; +import org.elasticsearch.plugins.PluginDescriptor; +import org.elasticsearch.plugins.PluginsLoader; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.compiler.InMemoryJavaCompiler; +import org.elasticsearch.test.jar.JarUtils; + +import java.io.IOException; +import java.lang.module.Configuration; +import java.lang.module.ModuleFinder; +import java.net.MalformedURLException; +import java.net.URL; +import java.net.URLClassLoader; +import java.nio.file.Path; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Stream; + +import static java.util.Map.entry; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +@ESTestCase.WithoutSecurityManager +public class PluginsResolverTests extends ESTestCase { + + private record TestPluginLayer(PluginBundle pluginBundle, ClassLoader pluginClassLoader, ModuleLayer pluginModuleLayer) + implements + PluginsLoader.PluginLayer {} + + public void testResolveModularPlugin() throws IOException, ClassNotFoundException { + String moduleName = "modular.plugin"; + String pluginName = "modular-plugin"; + + final Path home = createTempDir(); + + Path jar = createModularPluginJar(home, pluginName, moduleName, "p", "A"); + + var layer = createModuleLayer(moduleName, jar); + var loader = layer.findLoader(moduleName); + + PluginBundle bundle = createMockBundle(pluginName, moduleName, "p.A"); + PluginsLoader mockPluginsLoader = mock(PluginsLoader.class); + + when(mockPluginsLoader.pluginLayers()).thenReturn(Stream.of(new TestPluginLayer(bundle, loader, layer))); + PluginsResolver pluginsResolver = PluginsResolver.create(mockPluginsLoader); + + var testClass = loader.loadClass("p.A"); + var resolvedPluginName = pluginsResolver.resolveClassToPluginName(testClass); + var unresolvedPluginName1 = pluginsResolver.resolveClassToPluginName(PluginsResolver.class); + var unresolvedPluginName2 = pluginsResolver.resolveClassToPluginName(String.class); + + assertEquals(pluginName, resolvedPluginName); + assertNull(unresolvedPluginName1); + assertNull(unresolvedPluginName2); + } + + public void testResolveMultipleModularPlugins() throws IOException, ClassNotFoundException { + final Path home = createTempDir(); + + Path 
jar1 = createModularPluginJar(home, "plugin1", "module.one", "p", "A"); + Path jar2 = createModularPluginJar(home, "plugin2", "module.two", "q", "B"); + + var layer1 = createModuleLayer("module.one", jar1); + var loader1 = layer1.findLoader("module.one"); + var layer2 = createModuleLayer("module.two", jar2); + var loader2 = layer2.findLoader("module.two"); + + PluginBundle bundle1 = createMockBundle("plugin1", "module.one", "p.A"); + PluginBundle bundle2 = createMockBundle("plugin2", "module.two", "q.B"); + PluginsLoader mockPluginsLoader = mock(PluginsLoader.class); + + when(mockPluginsLoader.pluginLayers()).thenReturn( + Stream.of(new TestPluginLayer(bundle1, loader1, layer1), new TestPluginLayer(bundle2, loader2, layer2)) + ); + PluginsResolver pluginsResolver = PluginsResolver.create(mockPluginsLoader); + + var testClass1 = loader1.loadClass("p.A"); + var testClass2 = loader2.loadClass("q.B"); + var resolvedPluginName1 = pluginsResolver.resolveClassToPluginName(testClass1); + var resolvedPluginName2 = pluginsResolver.resolveClassToPluginName(testClass2); + + assertEquals("plugin1", resolvedPluginName1); + assertEquals("plugin2", resolvedPluginName2); + } + + public void testResolveReferencedModulesInModularPlugins() throws IOException, ClassNotFoundException { + final Path home = createTempDir(); + + Path dependencyJar = createModularPluginJar(home, "plugin1", "module.one", "p", "A"); + Path pluginJar = home.resolve("plugin2.jar"); + + Map<String, CharSequence> sources = Map.ofEntries( + entry("module-info", "module module.two { exports q; requires module.one; }"), + entry("q.B", "package q; public class B { public p.A a = null; }") + ); + + var classToBytes = InMemoryJavaCompiler.compile(sources, "--add-modules", "module.one", "-p", home.toString()); + JarUtils.createJarWithEntries( + pluginJar, + Map.ofEntries(entry("module-info.class", classToBytes.get("module-info")), entry("q/B.class", classToBytes.get("q.B"))) + ); + + var layer = createModuleLayer("module.two", pluginJar, dependencyJar); + var loader = layer.findLoader("module.two"); + + PluginBundle bundle = createMockBundle("plugin2", "module.two", "q.B"); + PluginsLoader mockPluginsLoader = mock(PluginsLoader.class); + + when(mockPluginsLoader.pluginLayers()).thenReturn(Stream.of(new TestPluginLayer(bundle, loader, layer))); + PluginsResolver pluginsResolver = PluginsResolver.create(mockPluginsLoader); + + var testClass1 = loader.loadClass("p.A"); + var testClass2 = loader.loadClass("q.B"); + var resolvedPluginName1 = pluginsResolver.resolveClassToPluginName(testClass1); + var resolvedPluginName2 = pluginsResolver.resolveClassToPluginName(testClass2); + + assertEquals("plugin2", resolvedPluginName1); + assertEquals("plugin2", resolvedPluginName2); + } + + public void testResolveMultipleNonModularPlugins() throws IOException, ClassNotFoundException { + final Path home = createTempDir(); + + Path jar1 = createNonModularPluginJar(home, "plugin1", "p", "A"); + Path jar2 = createNonModularPluginJar(home, "plugin2", "q", "B"); + + try (var loader1 = createClassLoader(jar1); var loader2 = createClassLoader(jar2)) { + + PluginBundle bundle1 = createMockBundle("plugin1", null, "p.A"); + PluginBundle bundle2 = createMockBundle("plugin2", null, "q.B"); + PluginsLoader mockPluginsLoader = mock(PluginsLoader.class); + + when(mockPluginsLoader.pluginLayers()).thenReturn( + Stream.of( + new TestPluginLayer(bundle1, loader1, ModuleLayer.boot()), + new TestPluginLayer(bundle2, loader2, ModuleLayer.boot()) + ) + ); + PluginsResolver pluginsResolver =
PluginsResolver.create(mockPluginsLoader); + + var testClass1 = loader1.loadClass("p.A"); + var testClass2 = loader2.loadClass("q.B"); + var resolvedPluginName1 = pluginsResolver.resolveClassToPluginName(testClass1); + var resolvedPluginName2 = pluginsResolver.resolveClassToPluginName(testClass2); + + assertEquals("plugin1", resolvedPluginName1); + assertEquals("plugin2", resolvedPluginName2); + } + } + + public void testResolveNonModularPlugin() throws IOException, ClassNotFoundException { + String pluginName = "non-modular-plugin"; + + final Path home = createTempDir(); + + Path jar = createNonModularPluginJar(home, pluginName, "p", "A"); + + try (var loader = createClassLoader(jar)) { + PluginBundle bundle = createMockBundle(pluginName, null, "p.A"); + PluginsLoader mockPluginsLoader = mock(PluginsLoader.class); + + when(mockPluginsLoader.pluginLayers()).thenReturn(Stream.of(new TestPluginLayer(bundle, loader, ModuleLayer.boot()))); + PluginsResolver pluginsResolver = PluginsResolver.create(mockPluginsLoader); + + var testClass = loader.loadClass("p.A"); + var resolvedPluginName = pluginsResolver.resolveClassToPluginName(testClass); + var unresolvedPluginName1 = pluginsResolver.resolveClassToPluginName(PluginsResolver.class); + var unresolvedPluginName2 = pluginsResolver.resolveClassToPluginName(String.class); + + assertEquals(pluginName, resolvedPluginName); + assertNull(unresolvedPluginName1); + assertNull(unresolvedPluginName2); + } + } + + private static URLClassLoader createClassLoader(Path jar) throws MalformedURLException { + return new URLClassLoader(new URL[] { jar.toUri().toURL() }); + } + + private static ModuleLayer createModuleLayer(String moduleName, Path... jars) { + var finder = ModuleFinder.of(jars); + Configuration cf = ModuleLayer.boot().configuration().resolve(finder, ModuleFinder.of(), Set.of(moduleName)); + var moduleController = ModuleLayer.defineModulesWithOneLoader( + cf, + List.of(ModuleLayer.boot()), + ClassLoader.getPlatformClassLoader() + ); + return moduleController.layer(); + } + + private static PluginBundle createMockBundle(String pluginName, String moduleName, String fqClassName) { + PluginDescriptor pd = new PluginDescriptor( + pluginName, + null, + null, + null, + null, + fqClassName, + moduleName, + List.of(), + false, + false, + true, + false + ); + + PluginBundle bundle = mock(PluginBundle.class); + when(bundle.pluginDescriptor()).thenReturn(pd); + return bundle; + } + + private static Path createModularPluginJar(Path home, String pluginName, String moduleName, String packageName, String className) + throws IOException { + Path jar = home.resolve(pluginName + ".jar"); + String fqClassName = packageName + "." + className; + + Map<String, CharSequence> sources = Map.ofEntries( + entry("module-info", "module " + moduleName + " { exports " + packageName + "; }"), + entry(fqClassName, "package " + packageName + "; public class " + className + " {}") + ); + + var classToBytes = InMemoryJavaCompiler.compile(sources); + JarUtils.createJarWithEntries( + jar, + Map.ofEntries( + entry("module-info.class", classToBytes.get("module-info")), + entry(packageName + "/" + className + ".class", classToBytes.get(fqClassName)) + ) + ); + return jar; + } + + private static Path createNonModularPluginJar(Path home, String pluginName, String packageName, String className) throws IOException { + Path jar = home.resolve(pluginName + ".jar"); + String fqClassName = packageName + "."
+ className; + + Map<String, CharSequence> sources = Map.ofEntries( + entry(fqClassName, "package " + packageName + "; public class " + className + " {}") + ); + + var classToBytes = InMemoryJavaCompiler.compile(sources); + JarUtils.createJarWithEntries(jar, Map.ofEntries(entry(packageName + "/" + className + ".class", classToBytes.get(fqClassName)))); + return jar; + } +} diff --git a/server/src/test/java/org/elasticsearch/common/io/stream/AbstractStreamTests.java b/server/src/test/java/org/elasticsearch/common/io/stream/AbstractStreamTests.java index d2b6d0a6ec6d..afaa7a9a3288 100644 --- a/server/src/test/java/org/elasticsearch/common/io/stream/AbstractStreamTests.java +++ b/server/src/test/java/org/elasticsearch/common/io/stream/AbstractStreamTests.java @@ -11,6 +11,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.CheckedBiConsumer; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -53,8 +54,6 @@ import static java.time.Instant.ofEpochSecond; import static java.time.ZonedDateTime.ofInstant; -import static org.elasticsearch.TransportVersions.ZDT_NANOS_SUPPORT; -import static org.elasticsearch.TransportVersions.ZDT_NANOS_SUPPORT_BROKEN; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasToString; @@ -729,15 +728,11 @@ public void testReadAfterReachingEndOfStream() throws IOException { } public void testZonedDateTimeSerialization() throws IOException { - checkZonedDateTimeSerialization(ZDT_NANOS_SUPPORT); - } - - public void testZonedDateTimeMillisBwcSerializationV1() throws IOException { - checkZonedDateTimeSerialization(TransportVersionUtils.getPreviousVersion(ZDT_NANOS_SUPPORT_BROKEN)); + checkZonedDateTimeSerialization(TransportVersions.V_8_16_0); } public void testZonedDateTimeMillisBwcSerialization() throws IOException { - checkZonedDateTimeSerialization(TransportVersionUtils.getPreviousVersion(ZDT_NANOS_SUPPORT)); + checkZonedDateTimeSerialization(TransportVersionUtils.getPreviousVersion(TransportVersions.V_8_16_0)); } public void checkZonedDateTimeSerialization(TransportVersion tv) throws IOException { @@ -745,12 +740,12 @@ public void checkZonedDateTimeSerialization(TransportVersion tv) throws IOExcept assertGenericRoundtrip(ofInstant(ofEpochSecond(1), randomZone()), tv); // just want to test a large number that will use 5+ bytes long maxEpochSecond = Integer.MAX_VALUE; - long minEpochSecond = tv.between(ZDT_NANOS_SUPPORT_BROKEN, ZDT_NANOS_SUPPORT) ?
0 : Integer.MIN_VALUE; + long minEpochSecond = Integer.MIN_VALUE; assertGenericRoundtrip(ofInstant(ofEpochSecond(maxEpochSecond), randomZone()), tv); assertGenericRoundtrip(ofInstant(ofEpochSecond(randomLongBetween(minEpochSecond, maxEpochSecond)), randomZone()), tv); assertGenericRoundtrip(ofInstant(ofEpochSecond(randomLongBetween(minEpochSecond, maxEpochSecond), 1_000_000), randomZone()), tv); assertGenericRoundtrip(ofInstant(ofEpochSecond(randomLongBetween(minEpochSecond, maxEpochSecond), 999_000_000), randomZone()), tv); - if (tv.onOrAfter(ZDT_NANOS_SUPPORT)) { + if (tv.onOrAfter(TransportVersions.V_8_16_0)) { assertGenericRoundtrip( ofInstant(ofEpochSecond(randomLongBetween(minEpochSecond, maxEpochSecond), 999_999_999), randomZone()), tv diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/json/JsonXContentTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/json/JsonXContentTests.java index 55f6cc5498d8..4135ead545e0 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/json/JsonXContentTests.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/json/JsonXContentTests.java @@ -11,6 +11,9 @@ import org.elasticsearch.common.xcontent.BaseXContentTestCase; import org.elasticsearch.xcontent.XContentGenerator; +import org.elasticsearch.xcontent.XContentParseException; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; @@ -28,4 +31,14 @@ public void testBigInteger() throws Exception { XContentGenerator generator = JsonXContent.jsonXContent.createGenerator(os); doTestBigInteger(generator, os); } + + public void testMalformedJsonFieldThrowsXContentException() throws Exception { + String json = "{\"test\":\"/*/}"; + try (XContentParser parser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, json)) { + parser.nextToken(); + parser.nextToken(); + parser.nextToken(); + assertThrows(XContentParseException.class, () -> parser.text()); + } + } } diff --git a/server/src/test/java/org/elasticsearch/index/search/NestedHelperTests.java b/server/src/test/java/org/elasticsearch/index/search/NestedHelperTests.java index a7a1d33badf2..b2583eb176de 100644 --- a/server/src/test/java/org/elasticsearch/index/search/NestedHelperTests.java +++ b/server/src/test/java/org/elasticsearch/index/search/NestedHelperTests.java @@ -17,6 +17,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.join.ScoreMode; +import org.elasticsearch.index.mapper.MapperMetrics; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperServiceTestCase; import org.elasticsearch.index.query.MatchAllQueryBuilder; @@ -27,12 +28,15 @@ import java.io.IOException; import java.util.Collections; +import static java.util.Collections.emptyMap; import static org.mockito.Mockito.mock; public class NestedHelperTests extends MapperServiceTestCase { MapperService mapperService; + SearchExecutionContext searchExecutionContext; + @Override public void setUp() throws Exception { super.setUp(); @@ -68,167 +72,185 @@ public void setUp() throws Exception { } } """; mapperService = createMapperService(mapping); - } - - private static NestedHelper buildNestedHelper(MapperService mapperService) { - return new NestedHelper(mapperService.mappingLookup().nestedLookup(), field -> mapperService.fieldType(field) != null); + 
searchExecutionContext = new SearchExecutionContext( + 0, + 0, + mapperService.getIndexSettings(), + null, + null, + mapperService, + mapperService.mappingLookup(), + null, + null, + parserConfig(), + writableRegistry(), + null, + null, + System::currentTimeMillis, + null, + null, + () -> true, + null, + emptyMap(), + MapperMetrics.NOOP + ); } public void testMatchAll() { - assertTrue(buildNestedHelper(mapperService).mightMatchNestedDocs(new MatchAllDocsQuery())); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(new MatchAllDocsQuery(), "nested1")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(new MatchAllDocsQuery(), "nested2")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(new MatchAllDocsQuery(), "nested3")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(new MatchAllDocsQuery(), "nested_missing")); + assertTrue(NestedHelper.mightMatchNestedDocs(new MatchAllDocsQuery(), searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(new MatchAllDocsQuery(), "nested1", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(new MatchAllDocsQuery(), "nested2", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(new MatchAllDocsQuery(), "nested3", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(new MatchAllDocsQuery(), "nested_missing", searchExecutionContext)); } public void testMatchNo() { - assertFalse(buildNestedHelper(mapperService).mightMatchNestedDocs(new MatchNoDocsQuery())); - assertFalse(buildNestedHelper(mapperService).mightMatchNonNestedDocs(new MatchNoDocsQuery(), "nested1")); - assertFalse(buildNestedHelper(mapperService).mightMatchNonNestedDocs(new MatchNoDocsQuery(), "nested2")); - assertFalse(buildNestedHelper(mapperService).mightMatchNonNestedDocs(new MatchNoDocsQuery(), "nested3")); - assertFalse(buildNestedHelper(mapperService).mightMatchNonNestedDocs(new MatchNoDocsQuery(), "nested_missing")); + assertFalse(NestedHelper.mightMatchNestedDocs(new MatchNoDocsQuery(), searchExecutionContext)); + assertFalse(NestedHelper.mightMatchNonNestedDocs(new MatchNoDocsQuery(), "nested1", searchExecutionContext)); + assertFalse(NestedHelper.mightMatchNonNestedDocs(new MatchNoDocsQuery(), "nested2", searchExecutionContext)); + assertFalse(NestedHelper.mightMatchNonNestedDocs(new MatchNoDocsQuery(), "nested3", searchExecutionContext)); + assertFalse(NestedHelper.mightMatchNonNestedDocs(new MatchNoDocsQuery(), "nested_missing", searchExecutionContext)); } public void testTermsQuery() { Query termsQuery = mapperService.fieldType("foo").termsQuery(Collections.singletonList("bar"), null); - assertFalse(buildNestedHelper(mapperService).mightMatchNestedDocs(termsQuery)); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested1")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested2")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested3")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested_missing")); + assertFalse(NestedHelper.mightMatchNestedDocs(termsQuery, searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(termsQuery, "nested1", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(termsQuery, "nested2", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(termsQuery, "nested3", 
searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(termsQuery, "nested_missing", searchExecutionContext)); termsQuery = mapperService.fieldType("nested1.foo").termsQuery(Collections.singletonList("bar"), null); - assertTrue(buildNestedHelper(mapperService).mightMatchNestedDocs(termsQuery)); - assertFalse(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested1")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested2")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested3")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested_missing")); + assertTrue(NestedHelper.mightMatchNestedDocs(termsQuery, searchExecutionContext)); + assertFalse(NestedHelper.mightMatchNonNestedDocs(termsQuery, "nested1", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(termsQuery, "nested2", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(termsQuery, "nested3", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(termsQuery, "nested_missing", searchExecutionContext)); termsQuery = mapperService.fieldType("nested2.foo").termsQuery(Collections.singletonList("bar"), null); - assertTrue(buildNestedHelper(mapperService).mightMatchNestedDocs(termsQuery)); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested1")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested2")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested3")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested_missing")); + assertTrue(NestedHelper.mightMatchNestedDocs(termsQuery, searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(termsQuery, "nested1", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(termsQuery, "nested2", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(termsQuery, "nested3", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(termsQuery, "nested_missing", searchExecutionContext)); termsQuery = mapperService.fieldType("nested3.foo").termsQuery(Collections.singletonList("bar"), null); - assertTrue(buildNestedHelper(mapperService).mightMatchNestedDocs(termsQuery)); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested1")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested2")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested3")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested_missing")); + assertTrue(NestedHelper.mightMatchNestedDocs(termsQuery, searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(termsQuery, "nested1", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(termsQuery, "nested2", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(termsQuery, "nested3", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(termsQuery, "nested_missing", searchExecutionContext)); } public void testTermQuery() { Query termQuery = mapperService.fieldType("foo").termQuery("bar", null); - assertFalse(buildNestedHelper(mapperService).mightMatchNestedDocs(termQuery)); - 
assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termQuery, "nested1")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termQuery, "nested2")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termQuery, "nested3")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termQuery, "nested_missing")); + assertFalse(NestedHelper.mightMatchNestedDocs(termQuery, searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(termQuery, "nested1", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(termQuery, "nested2", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(termQuery, "nested3", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(termQuery, "nested_missing", searchExecutionContext)); termQuery = mapperService.fieldType("nested1.foo").termQuery("bar", null); - assertTrue(buildNestedHelper(mapperService).mightMatchNestedDocs(termQuery)); - assertFalse(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termQuery, "nested1")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termQuery, "nested2")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termQuery, "nested3")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termQuery, "nested_missing")); + assertTrue(NestedHelper.mightMatchNestedDocs(termQuery, searchExecutionContext)); + assertFalse(NestedHelper.mightMatchNonNestedDocs(termQuery, "nested1", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(termQuery, "nested2", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(termQuery, "nested3", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(termQuery, "nested_missing", searchExecutionContext)); termQuery = mapperService.fieldType("nested2.foo").termQuery("bar", null); - assertTrue(buildNestedHelper(mapperService).mightMatchNestedDocs(termQuery)); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termQuery, "nested1")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termQuery, "nested2")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termQuery, "nested3")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termQuery, "nested_missing")); + assertTrue(NestedHelper.mightMatchNestedDocs(termQuery, searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(termQuery, "nested1", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(termQuery, "nested2", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(termQuery, "nested3", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(termQuery, "nested_missing", searchExecutionContext)); termQuery = mapperService.fieldType("nested3.foo").termQuery("bar", null); - assertTrue(buildNestedHelper(mapperService).mightMatchNestedDocs(termQuery)); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termQuery, "nested1")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termQuery, "nested2")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termQuery, "nested3")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termQuery, "nested_missing")); + assertTrue(NestedHelper.mightMatchNestedDocs(termQuery, searchExecutionContext)); + 
assertTrue(NestedHelper.mightMatchNonNestedDocs(termQuery, "nested1", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(termQuery, "nested2", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(termQuery, "nested3", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(termQuery, "nested_missing", searchExecutionContext)); } public void testRangeQuery() { SearchExecutionContext context = mock(SearchExecutionContext.class); Query rangeQuery = mapperService.fieldType("foo2").rangeQuery(2, 5, true, true, null, null, null, context); - assertFalse(buildNestedHelper(mapperService).mightMatchNestedDocs(rangeQuery)); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(rangeQuery, "nested1")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(rangeQuery, "nested2")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(rangeQuery, "nested3")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(rangeQuery, "nested_missing")); + assertFalse(NestedHelper.mightMatchNestedDocs(rangeQuery, searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(rangeQuery, "nested1", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(rangeQuery, "nested2", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(rangeQuery, "nested3", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(rangeQuery, "nested_missing", searchExecutionContext)); rangeQuery = mapperService.fieldType("nested1.foo2").rangeQuery(2, 5, true, true, null, null, null, context); - assertTrue(buildNestedHelper(mapperService).mightMatchNestedDocs(rangeQuery)); - assertFalse(buildNestedHelper(mapperService).mightMatchNonNestedDocs(rangeQuery, "nested1")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(rangeQuery, "nested2")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(rangeQuery, "nested3")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(rangeQuery, "nested_missing")); + assertTrue(NestedHelper.mightMatchNestedDocs(rangeQuery, searchExecutionContext)); + assertFalse(NestedHelper.mightMatchNonNestedDocs(rangeQuery, "nested1", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(rangeQuery, "nested2", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(rangeQuery, "nested3", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(rangeQuery, "nested_missing", searchExecutionContext)); rangeQuery = mapperService.fieldType("nested2.foo2").rangeQuery(2, 5, true, true, null, null, null, context); - assertTrue(buildNestedHelper(mapperService).mightMatchNestedDocs(rangeQuery)); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(rangeQuery, "nested1")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(rangeQuery, "nested2")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(rangeQuery, "nested3")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(rangeQuery, "nested_missing")); + assertTrue(NestedHelper.mightMatchNestedDocs(rangeQuery, searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(rangeQuery, "nested1", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(rangeQuery, "nested2", searchExecutionContext)); + 
assertTrue(NestedHelper.mightMatchNonNestedDocs(rangeQuery, "nested3", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(rangeQuery, "nested_missing", searchExecutionContext)); rangeQuery = mapperService.fieldType("nested3.foo2").rangeQuery(2, 5, true, true, null, null, null, context); - assertTrue(buildNestedHelper(mapperService).mightMatchNestedDocs(rangeQuery)); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(rangeQuery, "nested1")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(rangeQuery, "nested2")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(rangeQuery, "nested3")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(rangeQuery, "nested_missing")); + assertTrue(NestedHelper.mightMatchNestedDocs(rangeQuery, searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(rangeQuery, "nested1", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(rangeQuery, "nested2", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(rangeQuery, "nested3", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(rangeQuery, "nested_missing", searchExecutionContext)); } public void testDisjunction() { BooleanQuery bq = new BooleanQuery.Builder().add(new TermQuery(new Term("foo", "bar")), Occur.SHOULD) .add(new TermQuery(new Term("foo", "baz")), Occur.SHOULD) .build(); - assertFalse(buildNestedHelper(mapperService).mightMatchNestedDocs(bq)); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(bq, "nested1")); + assertFalse(NestedHelper.mightMatchNestedDocs(bq, searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(bq, "nested1", searchExecutionContext)); bq = new BooleanQuery.Builder().add(new TermQuery(new Term("nested1.foo", "bar")), Occur.SHOULD) .add(new TermQuery(new Term("nested1.foo", "baz")), Occur.SHOULD) .build(); - assertTrue(buildNestedHelper(mapperService).mightMatchNestedDocs(bq)); - assertFalse(buildNestedHelper(mapperService).mightMatchNonNestedDocs(bq, "nested1")); + assertTrue(NestedHelper.mightMatchNestedDocs(bq, searchExecutionContext)); + assertFalse(NestedHelper.mightMatchNonNestedDocs(bq, "nested1", searchExecutionContext)); bq = new BooleanQuery.Builder().add(new TermQuery(new Term("nested2.foo", "bar")), Occur.SHOULD) .add(new TermQuery(new Term("nested2.foo", "baz")), Occur.SHOULD) .build(); - assertTrue(buildNestedHelper(mapperService).mightMatchNestedDocs(bq)); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(bq, "nested2")); + assertTrue(NestedHelper.mightMatchNestedDocs(bq, searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(bq, "nested2", searchExecutionContext)); bq = new BooleanQuery.Builder().add(new TermQuery(new Term("nested3.foo", "bar")), Occur.SHOULD) .add(new TermQuery(new Term("nested3.foo", "baz")), Occur.SHOULD) .build(); - assertTrue(buildNestedHelper(mapperService).mightMatchNestedDocs(bq)); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(bq, "nested3")); + assertTrue(NestedHelper.mightMatchNestedDocs(bq, searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(bq, "nested3", searchExecutionContext)); bq = new BooleanQuery.Builder().add(new TermQuery(new Term("foo", "bar")), Occur.SHOULD) .add(new MatchAllDocsQuery(), Occur.SHOULD) .build(); - assertTrue(buildNestedHelper(mapperService).mightMatchNestedDocs(bq)); - 
assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(bq, "nested1")); + assertTrue(NestedHelper.mightMatchNestedDocs(bq, searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(bq, "nested1", searchExecutionContext)); bq = new BooleanQuery.Builder().add(new TermQuery(new Term("nested1.foo", "bar")), Occur.SHOULD) .add(new MatchAllDocsQuery(), Occur.SHOULD) .build(); - assertTrue(buildNestedHelper(mapperService).mightMatchNestedDocs(bq)); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(bq, "nested1")); + assertTrue(NestedHelper.mightMatchNestedDocs(bq, searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(bq, "nested1", searchExecutionContext)); bq = new BooleanQuery.Builder().add(new TermQuery(new Term("nested2.foo", "bar")), Occur.SHOULD) .add(new MatchAllDocsQuery(), Occur.SHOULD) .build(); - assertTrue(buildNestedHelper(mapperService).mightMatchNestedDocs(bq)); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(bq, "nested2")); + assertTrue(NestedHelper.mightMatchNestedDocs(bq, searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(bq, "nested2", searchExecutionContext)); bq = new BooleanQuery.Builder().add(new TermQuery(new Term("nested3.foo", "bar")), Occur.SHOULD) .add(new MatchAllDocsQuery(), Occur.SHOULD) .build(); - assertTrue(buildNestedHelper(mapperService).mightMatchNestedDocs(bq)); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(bq, "nested3")); + assertTrue(NestedHelper.mightMatchNestedDocs(bq, searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(bq, "nested3", searchExecutionContext)); } private static Occur requiredOccur() { @@ -239,42 +261,42 @@ public void testConjunction() { BooleanQuery bq = new BooleanQuery.Builder().add(new TermQuery(new Term("foo", "bar")), requiredOccur()) .add(new MatchAllDocsQuery(), requiredOccur()) .build(); - assertFalse(buildNestedHelper(mapperService).mightMatchNestedDocs(bq)); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(bq, "nested1")); + assertFalse(NestedHelper.mightMatchNestedDocs(bq, searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(bq, "nested1", searchExecutionContext)); bq = new BooleanQuery.Builder().add(new TermQuery(new Term("nested1.foo", "bar")), requiredOccur()) .add(new MatchAllDocsQuery(), requiredOccur()) .build(); - assertTrue(buildNestedHelper(mapperService).mightMatchNestedDocs(bq)); - assertFalse(buildNestedHelper(mapperService).mightMatchNonNestedDocs(bq, "nested1")); + assertTrue(NestedHelper.mightMatchNestedDocs(bq, searchExecutionContext)); + assertFalse(NestedHelper.mightMatchNonNestedDocs(bq, "nested1", searchExecutionContext)); bq = new BooleanQuery.Builder().add(new TermQuery(new Term("nested2.foo", "bar")), requiredOccur()) .add(new MatchAllDocsQuery(), requiredOccur()) .build(); - assertTrue(buildNestedHelper(mapperService).mightMatchNestedDocs(bq)); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(bq, "nested2")); + assertTrue(NestedHelper.mightMatchNestedDocs(bq, searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(bq, "nested2", searchExecutionContext)); bq = new BooleanQuery.Builder().add(new TermQuery(new Term("nested3.foo", "bar")), requiredOccur()) .add(new MatchAllDocsQuery(), requiredOccur()) .build(); - assertTrue(buildNestedHelper(mapperService).mightMatchNestedDocs(bq)); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(bq, 
"nested3")); + assertTrue(NestedHelper.mightMatchNestedDocs(bq, searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(bq, "nested3", searchExecutionContext)); bq = new BooleanQuery.Builder().add(new MatchAllDocsQuery(), requiredOccur()).add(new MatchAllDocsQuery(), requiredOccur()).build(); - assertTrue(buildNestedHelper(mapperService).mightMatchNestedDocs(bq)); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(bq, "nested1")); + assertTrue(NestedHelper.mightMatchNestedDocs(bq, searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(bq, "nested1", searchExecutionContext)); bq = new BooleanQuery.Builder().add(new MatchAllDocsQuery(), requiredOccur()).add(new MatchAllDocsQuery(), requiredOccur()).build(); - assertTrue(buildNestedHelper(mapperService).mightMatchNestedDocs(bq)); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(bq, "nested1")); + assertTrue(NestedHelper.mightMatchNestedDocs(bq, searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(bq, "nested1", searchExecutionContext)); bq = new BooleanQuery.Builder().add(new MatchAllDocsQuery(), requiredOccur()).add(new MatchAllDocsQuery(), requiredOccur()).build(); - assertTrue(buildNestedHelper(mapperService).mightMatchNestedDocs(bq)); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(bq, "nested2")); + assertTrue(NestedHelper.mightMatchNestedDocs(bq, searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(bq, "nested2", searchExecutionContext)); bq = new BooleanQuery.Builder().add(new MatchAllDocsQuery(), requiredOccur()).add(new MatchAllDocsQuery(), requiredOccur()).build(); - assertTrue(buildNestedHelper(mapperService).mightMatchNestedDocs(bq)); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(bq, "nested3")); + assertTrue(NestedHelper.mightMatchNestedDocs(bq, searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(bq, "nested3", searchExecutionContext)); } public void testNested() throws IOException { @@ -288,11 +310,11 @@ public void testNested() throws IOException { .build(); assertEquals(expectedChildQuery, query.getChildQuery()); - assertFalse(buildNestedHelper(mapperService).mightMatchNestedDocs(query)); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(query, "nested1")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(query, "nested2")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(query, "nested3")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(query, "nested_missing")); + assertFalse(NestedHelper.mightMatchNestedDocs(query, searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(query, "nested1", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(query, "nested2", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(query, "nested3", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(query, "nested_missing", searchExecutionContext)); queryBuilder = new NestedQueryBuilder("nested1", new TermQueryBuilder("nested1.foo", "bar"), ScoreMode.Avg); query = (ESToParentBlockJoinQuery) queryBuilder.toQuery(context); @@ -301,11 +323,11 @@ public void testNested() throws IOException { expectedChildQuery = new TermQuery(new Term("nested1.foo", "bar")); assertEquals(expectedChildQuery, query.getChildQuery()); - 
assertFalse(buildNestedHelper(mapperService).mightMatchNestedDocs(query)); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(query, "nested1")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(query, "nested2")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(query, "nested3")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(query, "nested_missing")); + assertFalse(NestedHelper.mightMatchNestedDocs(query, searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(query, "nested1", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(query, "nested2", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(query, "nested3", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(query, "nested_missing", searchExecutionContext)); queryBuilder = new NestedQueryBuilder("nested2", new TermQueryBuilder("nested2.foo", "bar"), ScoreMode.Avg); query = (ESToParentBlockJoinQuery) queryBuilder.toQuery(context); @@ -316,11 +338,11 @@ public void testNested() throws IOException { .build(); assertEquals(expectedChildQuery, query.getChildQuery()); - assertFalse(buildNestedHelper(mapperService).mightMatchNestedDocs(query)); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(query, "nested1")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(query, "nested2")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(query, "nested3")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(query, "nested_missing")); + assertFalse(NestedHelper.mightMatchNestedDocs(query, searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(query, "nested1", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(query, "nested2", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(query, "nested3", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(query, "nested_missing", searchExecutionContext)); queryBuilder = new NestedQueryBuilder("nested3", new TermQueryBuilder("nested3.foo", "bar"), ScoreMode.Avg); query = (ESToParentBlockJoinQuery) queryBuilder.toQuery(context); @@ -331,10 +353,10 @@ public void testNested() throws IOException { .build(); assertEquals(expectedChildQuery, query.getChildQuery()); - assertFalse(buildNestedHelper(mapperService).mightMatchNestedDocs(query)); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(query, "nested1")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(query, "nested2")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(query, "nested3")); - assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(query, "nested_missing")); + assertFalse(NestedHelper.mightMatchNestedDocs(query, searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(query, "nested1", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(query, "nested2", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(query, "nested3", searchExecutionContext)); + assertTrue(NestedHelper.mightMatchNonNestedDocs(query, "nested_missing", searchExecutionContext)); } } diff --git a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java index d1ccfcbe7873..89fd25f638e1 100644 --- 
a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java @@ -95,7 +95,6 @@ import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.builder.PointInTimeBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.collapse.CollapseBuilder; import org.elasticsearch.search.dfs.AggregatedDfs; import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.fetch.ShardFetchRequest; @@ -124,7 +123,6 @@ import org.elasticsearch.search.rank.feature.RankFeatureResult; import org.elasticsearch.search.rank.feature.RankFeatureShardRequest; import org.elasticsearch.search.rank.feature.RankFeatureShardResult; -import org.elasticsearch.search.slice.SliceBuilder; import org.elasticsearch.search.suggest.SuggestBuilder; import org.elasticsearch.tasks.TaskCancelHelper; import org.elasticsearch.tasks.TaskCancelledException; @@ -2930,119 +2928,6 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { } } - /** - * This method tests validation that happens on the data nodes, which is now performed on the coordinating node. - * We still need the validation to cover for mixed cluster scenarios where the coordinating node does not perform the check yet. - */ - public void testParseSourceValidation() { - String index = randomAlphaOfLengthBetween(5, 10).toLowerCase(Locale.ROOT); - IndexService indexService = createIndex(index); - final SearchService service = getInstanceFromNode(SearchService.class); - { - // scroll and search_after - SearchRequest searchRequest = new SearchRequest().source(new SearchSourceBuilder()); - searchRequest.scroll(new TimeValue(1000)); - searchRequest.source().searchAfter(new String[] { "value" }); - assertCreateContextValidation(searchRequest, "`search_after` cannot be used in a scroll context.", indexService, service); - } - { - // scroll and collapse - SearchRequest searchRequest = new SearchRequest().source(new SearchSourceBuilder()); - searchRequest.scroll(new TimeValue(1000)); - searchRequest.source().collapse(new CollapseBuilder("field")); - assertCreateContextValidation(searchRequest, "cannot use `collapse` in a scroll context", indexService, service); - } - { - // search_after and `from` isn't valid - SearchRequest searchRequest = new SearchRequest().source(new SearchSourceBuilder()); - searchRequest.source().searchAfter(new String[] { "value" }); - searchRequest.source().from(10); - assertCreateContextValidation( - searchRequest, - "`from` parameter must be set to 0 when `search_after` is used", - indexService, - service - ); - } - { - // slice without scroll or pit - SearchRequest searchRequest = new SearchRequest().source(new SearchSourceBuilder()); - searchRequest.source().slice(new SliceBuilder(1, 10)); - assertCreateContextValidation( - searchRequest, - "[slice] can only be used with [scroll] or [point-in-time] requests", - indexService, - service - ); - } - { - // stored fields disabled with _source requested - SearchRequest searchRequest = new SearchRequest().source(new SearchSourceBuilder()); - searchRequest.source().storedField("_none_"); - searchRequest.source().fetchSource(true); - assertCreateContextValidation( - searchRequest, - "[stored_fields] cannot be disabled if [_source] is requested", - indexService, - service - ); - } - { - // stored fields disabled with fetch fields requested - SearchRequest searchRequest = new SearchRequest().source(new 
SearchSourceBuilder()); - searchRequest.source().storedField("_none_"); - searchRequest.source().fetchSource(false); - searchRequest.source().fetchField("field"); - assertCreateContextValidation( - searchRequest, - "[stored_fields] cannot be disabled when using the [fields] option", - indexService, - service - ); - } - } - - private static void assertCreateContextValidation( - SearchRequest searchRequest, - String errorMessage, - IndexService indexService, - SearchService searchService - ) { - ShardId shardId = new ShardId(indexService.index(), 0); - long nowInMillis = System.currentTimeMillis(); - String clusterAlias = randomBoolean() ? null : randomAlphaOfLengthBetween(3, 10); - searchRequest.allowPartialSearchResults(randomBoolean()); - ShardSearchRequest request = new ShardSearchRequest( - OriginalIndices.NONE, - searchRequest, - shardId, - 0, - indexService.numberOfShards(), - AliasFilter.EMPTY, - 1f, - nowInMillis, - clusterAlias - ); - - SearchShardTask task = new SearchShardTask(1, "type", "action", "description", null, emptyMap()); - - ReaderContext readerContext = null; - try { - ReaderContext createOrGetReaderContext = searchService.createOrGetReaderContext(request); - readerContext = createOrGetReaderContext; - IllegalArgumentException exception = expectThrows( - IllegalArgumentException.class, - () -> searchService.createContext(createOrGetReaderContext, request, task, ResultsType.QUERY, randomBoolean()) - ); - assertThat(exception.getMessage(), containsString(errorMessage)); - } finally { - if (readerContext != null) { - readerContext.close(); - searchService.freeReaderContext(readerContext.id()); - } - } - } - private static ReaderContext createReaderContext(IndexService indexService, IndexShard indexShard) { return new ReaderContext( new ShardSearchContextId(UUIDs.randomBase64UUID(), randomNonNegativeLong()), diff --git a/server/src/test/java/org/elasticsearch/search/retriever/RankDocsRetrieverBuilderTests.java b/server/src/test/java/org/elasticsearch/search/retriever/RankDocsRetrieverBuilderTests.java index af6782c45dce..ccf33c0b71b6 100644 --- a/server/src/test/java/org/elasticsearch/search/retriever/RankDocsRetrieverBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/retriever/RankDocsRetrieverBuilderTests.java @@ -95,12 +95,7 @@ private List preFilters(QueryRewriteContext queryRewriteContext) t } private RankDocsRetrieverBuilder createRandomRankDocsRetrieverBuilder(QueryRewriteContext queryRewriteContext) throws IOException { - return new RankDocsRetrieverBuilder( - randomIntBetween(1, 100), - innerRetrievers(queryRewriteContext), - rankDocsSupplier(), - preFilters(queryRewriteContext) - ); + return new RankDocsRetrieverBuilder(randomIntBetween(1, 100), innerRetrievers(queryRewriteContext), rankDocsSupplier()); } public void testExtractToSearchSourceBuilder() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/search/vectors/TestQueryVectorBuilderPlugin.java b/server/src/test/java/org/elasticsearch/search/vectors/TestQueryVectorBuilderPlugin.java index c47c8c16f6a2..5733a51bb7e9 100644 --- a/server/src/test/java/org/elasticsearch/search/vectors/TestQueryVectorBuilderPlugin.java +++ b/server/src/test/java/org/elasticsearch/search/vectors/TestQueryVectorBuilderPlugin.java @@ -27,9 +27,9 @@ /** * A SearchPlugin to exercise query vector builder */ -class TestQueryVectorBuilderPlugin implements SearchPlugin { +public class TestQueryVectorBuilderPlugin implements SearchPlugin { - static class TestQueryVectorBuilder implements 
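The removed testParseSourceValidation needed a data-node ReaderContext just to observe these rejections. A hedged sketch of the coordinating-node counterpart, assuming the equivalent checks are reachable through SearchRequest#validate; the scroll plus search_after combination is one of the cases the deleted test covered:

import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.search.builder.SearchSourceBuilder;

final class CoordinatorValidationSketch {
    // scroll + search_after should be rejected before any shard-level
    // request or reader context is created; a non-null result signals
    // the validation failure.
    static ActionRequestValidationException scrollWithSearchAfter() {
        SearchRequest searchRequest = new SearchRequest().source(new SearchSourceBuilder());
        searchRequest.scroll(new TimeValue(1000));
        searchRequest.source().searchAfter(new String[] { "value" });
        return searchRequest.validate();
    }
}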
QueryVectorBuilder { + public static class TestQueryVectorBuilder implements QueryVectorBuilder { private static final String NAME = "test_query_vector_builder"; private static final ParseField QUERY_VECTOR = new ParseField("query_vector"); @@ -47,11 +47,11 @@ static class TestQueryVectorBuilder implements QueryVectorBuilder { private List vectorToBuild; - TestQueryVectorBuilder(List vectorToBuild) { + public TestQueryVectorBuilder(List vectorToBuild) { this.vectorToBuild = vectorToBuild; } - TestQueryVectorBuilder(float[] expected) { + public TestQueryVectorBuilder(float[] expected) { this.vectorToBuild = new ArrayList<>(expected.length); for (float f : expected) { vectorToBuild.add(f); diff --git a/settings.gradle b/settings.gradle index 4722fc311480..747fbb3e439f 100644 --- a/settings.gradle +++ b/settings.gradle @@ -73,6 +73,7 @@ List projects = [ 'distribution:packages:aarch64-rpm', 'distribution:packages:rpm', 'distribution:bwc:bugfix', + 'distribution:bwc:bugfix2', 'distribution:bwc:maintenance', 'distribution:bwc:minor', 'distribution:bwc:staged', diff --git a/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java b/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java index a9a825af3b86..91875600ec00 100644 --- a/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java +++ b/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java @@ -45,7 +45,7 @@ public MockPluginsService(Settings settings, Environment environment, Collection super( settings, environment.configFile(), - new PluginsLoader(Collections.emptyList(), Collections.emptyList(), Collections.emptyMap()) + new PluginsLoader(Collections.emptyList(), Collections.emptyList(), Collections.emptyMap(), Collections.emptySet()) ); List pluginsLoaded = new ArrayList<>(); diff --git a/test/framework/src/main/java/org/elasticsearch/search/retriever/TestCompoundRetrieverBuilder.java b/test/framework/src/main/java/org/elasticsearch/search/retriever/TestCompoundRetrieverBuilder.java index 9f199aa7f3ef..4a5f280c10a9 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/retriever/TestCompoundRetrieverBuilder.java +++ b/test/framework/src/main/java/org/elasticsearch/search/retriever/TestCompoundRetrieverBuilder.java @@ -10,6 +10,7 @@ package org.elasticsearch.search.retriever; import org.apache.lucene.search.ScoreDoc; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.rank.RankDoc; import org.elasticsearch.xcontent.XContentBuilder; @@ -23,16 +24,17 @@ public class TestCompoundRetrieverBuilder extends CompoundRetrieverBuilder(), rankWindowSize); + this(new ArrayList<>(), rankWindowSize, new ArrayList<>()); } - TestCompoundRetrieverBuilder(List childRetrievers, int rankWindowSize) { + TestCompoundRetrieverBuilder(List childRetrievers, int rankWindowSize, List preFilterQueryBuilders) { super(childRetrievers, rankWindowSize); + this.preFilterQueryBuilders = preFilterQueryBuilders; } @Override - protected TestCompoundRetrieverBuilder clone(List newChildRetrievers) { - return new TestCompoundRetrieverBuilder(newChildRetrievers, rankWindowSize); + protected TestCompoundRetrieverBuilder clone(List newChildRetrievers, List newPreFilterQueryBuilders) { + return new TestCompoundRetrieverBuilder(newChildRetrievers, rankWindowSize, newPreFilterQueryBuilders); } @Override diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java 
b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index b4f4243fb90f..4428afaaeabe 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -333,8 +333,11 @@ public void initClient() throws IOException { assert testFeatureServiceInitialized() == false; clusterHosts = parseClusterHosts(getTestRestCluster()); logger.info("initializing REST clients against {}", clusterHosts); - client = buildClient(restClientSettings(), clusterHosts.toArray(new HttpHost[clusterHosts.size()])); - adminClient = buildClient(restAdminSettings(), clusterHosts.toArray(new HttpHost[clusterHosts.size()])); + var clientSettings = restClientSettings(); + var adminSettings = restAdminSettings(); + var hosts = clusterHosts.toArray(new HttpHost[0]); + client = buildClient(clientSettings, hosts); + adminClient = clientSettings.equals(adminSettings) ? client : buildClient(adminSettings, hosts); availableFeatures = EnumSet.of(ProductFeature.LEGACY_TEMPLATES); Set versions = new HashSet<>(); diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java index 0d42a2856a10..85510c8a989c 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java @@ -37,9 +37,6 @@ public class InternalMultiTerms extends AbstractInternalTerms { - - long bucketOrd; - protected long docCount; protected InternalAggregations aggregations; private long docCountError; diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregator.java index 1691aedf543f..5c10e2c8feeb 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregator.java @@ -20,6 +20,7 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.util.IntArray; import org.elasticsearch.common.util.LongArray; import org.elasticsearch.common.util.ObjectArray; import org.elasticsearch.common.util.ObjectArrayPriorityQueue; @@ -40,6 +41,7 @@ import org.elasticsearch.search.aggregations.LeafBucketCollector; import org.elasticsearch.search.aggregations.LeafBucketCollectorBase; import org.elasticsearch.search.aggregations.bucket.DeferableBucketAggregator; +import org.elasticsearch.search.aggregations.bucket.terms.BucketAndOrd; import org.elasticsearch.search.aggregations.bucket.terms.BucketPriorityQueue; import org.elasticsearch.search.aggregations.bucket.terms.BytesKeyedBucketOrds; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator; @@ -72,7 +74,7 @@ class MultiTermsAggregator extends DeferableBucketAggregator { protected final List formats; protected final TermsAggregator.BucketCountThresholds bucketCountThresholds; protected final BucketOrder order; - protected final Comparator partiallyBuiltBucketComparator; + protected 
final Comparator> partiallyBuiltBucketComparator; protected final Set aggsUsedForSorting; protected final SubAggCollectionMode collectMode; private final List values; @@ -99,7 +101,7 @@ protected MultiTermsAggregator( super(name, factories, context, parent, metadata); this.bucketCountThresholds = bucketCountThresholds; this.order = order; - partiallyBuiltBucketComparator = order == null ? null : order.partiallyBuiltBucketComparator(b -> b.bucketOrd, this); + partiallyBuiltBucketComparator = order == null ? null : order.partiallyBuiltBucketComparator(this); this.formats = formats; this.showTermDocCountError = showTermDocCountError; if (subAggsNeedScore() && descendsFromNestedAggregator(parent) || context.isInSortOrderExecutionRequired()) { @@ -242,52 +244,67 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw LongArray otherDocCounts = bigArrays().newLongArray(owningBucketOrds.size(), true); ObjectArray topBucketsPerOrd = bigArrays().newObjectArray(owningBucketOrds.size()) ) { - for (long ordIdx = 0; ordIdx < owningBucketOrds.size(); ordIdx++) { - final long owningBucketOrd = owningBucketOrds.get(ordIdx); - long bucketsInOrd = bucketOrds.bucketsInOrd(owningBucketOrd); - - int size = (int) Math.min(bucketsInOrd, bucketCountThresholds.getShardSize()); - try ( - ObjectArrayPriorityQueue ordered = new BucketPriorityQueue<>( - size, - bigArrays(), - partiallyBuiltBucketComparator - ) - ) { - InternalMultiTerms.Bucket spare = null; - BytesRef spareKey = null; - BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrd); - while (ordsEnum.next()) { - long docCount = bucketDocCount(ordsEnum.ord()); - otherDocCounts.increment(ordIdx, docCount); - if (docCount < bucketCountThresholds.getShardMinDocCount()) { - continue; - } - if (spare == null) { - checkRealMemoryCBForInternalBucket(); - spare = new InternalMultiTerms.Bucket(null, 0, null, showTermDocCountError, 0, formats, keyConverters); - spareKey = new BytesRef(); - } - ordsEnum.readValue(spareKey); - spare.terms = unpackTerms(spareKey); - spare.docCount = docCount; - spare.bucketOrd = ordsEnum.ord(); - spare = ordered.insertWithOverflow(spare); - } + try (IntArray bucketsToCollect = bigArrays().newIntArray(owningBucketOrds.size())) { + long ordsToCollect = 0; + for (long ordIdx = 0; ordIdx < owningBucketOrds.size(); ordIdx++) { + int size = (int) Math.min(bucketOrds.bucketsInOrd(owningBucketOrds.get(ordIdx)), bucketCountThresholds.getShardSize()); + ordsToCollect += size; + bucketsToCollect.set(ordIdx, size); + } + try (LongArray ordsArray = bigArrays().newLongArray(ordsToCollect)) { + long ordsCollected = 0; + for (long ordIdx = 0; ordIdx < owningBucketOrds.size(); ordIdx++) { + final long owningBucketOrd = owningBucketOrds.get(ordIdx); + long bucketsInOrd = bucketOrds.bucketsInOrd(owningBucketOrd); + + int size = (int) Math.min(bucketsInOrd, bucketCountThresholds.getShardSize()); + try ( + ObjectArrayPriorityQueue> ordered = new BucketPriorityQueue<>( + size, + bigArrays(), + partiallyBuiltBucketComparator + ) + ) { + BucketAndOrd spare = null; + BytesRef spareKey = null; + BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrd); + while (ordsEnum.next()) { + long docCount = bucketDocCount(ordsEnum.ord()); + otherDocCounts.increment(ordIdx, docCount); + if (docCount < bucketCountThresholds.getShardMinDocCount()) { + continue; + } + if (spare == null) { + checkRealMemoryCBForInternalBucket(); + spare = new BucketAndOrd<>( + new InternalMultiTerms.Bucket(null, 
0, null, showTermDocCountError, 0, formats, keyConverters) + ); + spareKey = new BytesRef(); + } + ordsEnum.readValue(spareKey); + spare.bucket.terms = unpackTerms(spareKey); + spare.bucket.docCount = docCount; + spare.ord = ordsEnum.ord(); + spare = ordered.insertWithOverflow(spare); + } - // Get the top buckets - InternalMultiTerms.Bucket[] bucketsForOrd = new InternalMultiTerms.Bucket[(int) ordered.size()]; - topBucketsPerOrd.set(ordIdx, bucketsForOrd); - for (int b = (int) ordered.size() - 1; b >= 0; --b) { - InternalMultiTerms.Bucket[] buckets = topBucketsPerOrd.get(ordIdx); - buckets[b] = ordered.pop(); - otherDocCounts.increment(ordIdx, -buckets[b].getDocCount()); + // Get the top buckets + int orderedSize = (int) ordered.size(); + InternalMultiTerms.Bucket[] buckets = new InternalMultiTerms.Bucket[orderedSize]; + for (int i = orderedSize - 1; i >= 0; --i) { + BucketAndOrd bucketAndOrd = ordered.pop(); + buckets[i] = bucketAndOrd.bucket; + ordsArray.set(ordsCollected + i, bucketAndOrd.ord); + otherDocCounts.increment(ordIdx, -buckets[i].getDocCount()); + } + topBucketsPerOrd.set(ordIdx, buckets); + ordsCollected += orderedSize; + } } + buildSubAggsForAllBuckets(topBucketsPerOrd, ordsArray, (b, a) -> b.aggregations = a); } } - buildSubAggsForAllBuckets(topBucketsPerOrd, b -> b.bucketOrd, (b, a) -> b.aggregations = a); - return buildAggregations( Math.toIntExact(owningBucketOrds.size()), ordIdx -> buildResult(otherDocCounts.get(ordIdx), topBucketsPerOrd.get(ordIdx)) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datastreams/DataStreamLifecycleFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datastreams/DataStreamLifecycleFeatureSetUsage.java index 7a31888a440c..a61a86eea710 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datastreams/DataStreamLifecycleFeatureSetUsage.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datastreams/DataStreamLifecycleFeatureSetUsage.java @@ -111,7 +111,7 @@ public LifecycleStats( } public static LifecycleStats read(StreamInput in) throws IOException { - if (in.getTransportVersion().onOrAfter(TransportVersions.GLOBAL_RETENTION_TELEMETRY)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { return new LifecycleStats( in.readVLong(), in.readBoolean(), @@ -139,7 +139,7 @@ public static LifecycleStats read(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().onOrAfter(TransportVersions.GLOBAL_RETENTION_TELEMETRY)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeVLong(dataStreamsWithLifecyclesCount); out.writeBoolean(defaultRolloverUsed); dataRetentionStats.writeTo(out); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/EnrichStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/EnrichStatsAction.java index 0457de6edcc9..36322ed6c6cb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/EnrichStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/EnrichStatsAction.java @@ -209,7 +209,7 @@ public CacheStats(StreamInput in) throws IOException { in.readVLong(), in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0) ? in.readLong() : -1, in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0) ? 
in.readLong() : -1, - in.getTransportVersion().onOrAfter(TransportVersions.ENRICH_CACHE_STATS_SIZE_ADDED) ? in.readLong() : -1 + in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0) ? in.readLong() : -1 ); } @@ -237,7 +237,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeLong(hitsTimeInMillis); out.writeLong(missesTimeInMillis); } - if (out.getTransportVersion().onOrAfter(TransportVersions.ENRICH_CACHE_STATS_SIZE_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeLong(cacheSizeInBytes); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleExplainResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleExplainResponse.java index 33402671a223..5d635c97d9c8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleExplainResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleExplainResponse.java @@ -328,7 +328,7 @@ public IndexLifecycleExplainResponse(StreamInput in) throws IOException { } else { indexCreationDate = null; } - if (in.getTransportVersion().onOrAfter(TransportVersions.RETAIN_ILM_STEP_INFO)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { previousStepInfo = in.readOptionalBytesReference(); } else { previousStepInfo = null; @@ -379,7 +379,7 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_1_0)) { out.writeOptionalLong(indexCreationDate); } - if (out.getTransportVersion().onOrAfter(TransportVersions.RETAIN_ILM_STEP_INFO)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeOptionalBytesReference(previousStepInfo); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SearchableSnapshotAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SearchableSnapshotAction.java index c06dcc0f083d..da64df2672bd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SearchableSnapshotAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SearchableSnapshotAction.java @@ -8,6 +8,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.TransportVersions; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.metadata.IndexAbstraction; @@ -32,7 +33,6 @@ import java.util.List; import java.util.Objects; -import static org.elasticsearch.TransportVersions.ILM_ADD_SEARCHABLE_SNAPSHOT_TOTAL_SHARDS_PER_NODE; import static org.elasticsearch.snapshots.SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOTS_REPOSITORY_NAME_SETTING_KEY; import static org.elasticsearch.snapshots.SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOTS_SNAPSHOT_NAME_SETTING_KEY; import static org.elasticsearch.snapshots.SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOT_PARTIAL_SETTING_KEY; @@ -102,9 +102,7 @@ public SearchableSnapshotAction(String snapshotRepository) { public SearchableSnapshotAction(StreamInput in) throws IOException { this.snapshotRepository = in.readString(); this.forceMergeIndex = in.readBoolean(); - this.totalShardsPerNode = in.getTransportVersion().onOrAfter(ILM_ADD_SEARCHABLE_SNAPSHOT_TOTAL_SHARDS_PER_NODE) - ? 
in.readOptionalInt() - : null; + this.totalShardsPerNode = in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0) ? in.readOptionalInt() : null; } boolean isForceMergeIndex() { @@ -424,7 +422,7 @@ public String getWriteableName() { public void writeTo(StreamOutput out) throws IOException { out.writeString(snapshotRepository); out.writeBoolean(forceMergeIndex); - if (out.getTransportVersion().onOrAfter(ILM_ADD_SEARCHABLE_SNAPSHOT_TOTAL_SHARDS_PER_NODE)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeOptionalInt(totalShardsPerNode); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/DeleteInferenceEndpointAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/DeleteInferenceEndpointAction.java index 226fe3630b38..c3f991a8b4e1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/DeleteInferenceEndpointAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/DeleteInferenceEndpointAction.java @@ -127,7 +127,7 @@ public Response(StreamInput in) throws IOException { pipelineIds = Set.of(); } - if (in.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_DONT_DELETE_WHEN_SEMANTIC_TEXT_EXISTS)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { indexes = in.readCollectionAsSet(StreamInput::readString); dryRunMessage = in.readOptionalString(); } else { @@ -143,7 +143,7 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { out.writeCollection(pipelineIds, StreamOutput::writeString); } - if (out.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_DONT_DELETE_WHEN_SEMANTIC_TEXT_EXISTS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeCollection(indexes, StreamOutput::writeString); out.writeOptionalString(dryRunMessage); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/GetInferenceModelAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/GetInferenceModelAction.java index ea0462d0f103..ba3d417d0267 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/GetInferenceModelAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/GetInferenceModelAction.java @@ -63,7 +63,7 @@ public Request(StreamInput in) throws IOException { this.inferenceEntityId = in.readString(); this.taskType = TaskType.fromStream(in); if (in.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_DONT_PERSIST_ON_READ) - || in.getTransportVersion().isPatchFrom(TransportVersions.INFERENCE_DONT_PERSIST_ON_READ_BACKPORT_8_16)) { + || in.getTransportVersion().isPatchFrom(TransportVersions.V_8_16_0)) { this.persistDefaultConfig = in.readBoolean(); } else { this.persistDefaultConfig = PERSIST_DEFAULT_CONFIGS; @@ -89,7 +89,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(inferenceEntityId); taskType.writeTo(out); if (out.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_DONT_PERSIST_ON_READ) - || out.getTransportVersion().isPatchFrom(TransportVersions.INFERENCE_DONT_PERSIST_ON_READ_BACKPORT_8_16)) { + || out.getTransportVersion().isPatchFrom(TransportVersions.V_8_16_0)) { out.writeBoolean(this.persistDefaultConfig); } } diff --git 
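A pattern that repeats through the rest of this PR: development-time named constants in TransportVersions are folded into the released V_8_16_0 constant once 8.16.0 has shipped, leaving the wire format unchanged. A minimal sketch of the gate itself, with an illustrative optional field modeled on the SearchableSnapshotAction hunk above:

import java.io.IOException;

import org.elasticsearch.TransportVersions;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;

final class VersionGatedFieldSketch {
    private final Integer totalShardsPerNode;

    VersionGatedFieldSketch(StreamInput in) throws IOException {
        // Only consume the field when the sender is new enough to have
        // written it; older senders never put it on the wire.
        this.totalShardsPerNode = in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)
            ? in.readOptionalInt()
            : null;
    }

    void writeTo(StreamOutput out) throws IOException {
        // Writers apply the same gate so that old readers are never handed
        // bytes they do not know how to decode.
        if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) {
            out.writeOptionalInt(totalShardsPerNode);
        }
    }
}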
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MachineLearningFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MachineLearningFeatureSetUsage.java index 0645299dfc30..8c4611f05e72 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MachineLearningFeatureSetUsage.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MachineLearningFeatureSetUsage.java @@ -66,7 +66,7 @@ public MachineLearningFeatureSetUsage(StreamInput in) throws IOException { this.analyticsUsage = in.readGenericMap(); this.inferenceUsage = in.readGenericMap(); this.nodeCount = in.readInt(); - if (in.getTransportVersion().onOrAfter(TransportVersions.ML_TELEMETRY_MEMORY_ADDED)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { this.memoryUsage = in.readGenericMap(); } else { this.memoryUsage = Map.of(); @@ -86,7 +86,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeGenericMap(analyticsUsage); out.writeGenericMap(inferenceUsage); out.writeInt(nodeCount); - if (out.getTransportVersion().onOrAfter(TransportVersions.ML_TELEMETRY_MEMORY_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeGenericMap(memoryUsage); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CreateTrainedModelAssignmentAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CreateTrainedModelAssignmentAction.java index c6976ab4b513..2aedb4634753 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CreateTrainedModelAssignmentAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CreateTrainedModelAssignmentAction.java @@ -47,7 +47,7 @@ public Request(StartTrainedModelDeploymentAction.TaskParams taskParams, Adaptive public Request(StreamInput in) throws IOException { super(in); this.taskParams = new StartTrainedModelDeploymentAction.TaskParams(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_ADAPTIVE_ALLOCATIONS)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { this.adaptiveAllocationsSettings = in.readOptionalWriteable(AdaptiveAllocationsSettings::new); } else { this.adaptiveAllocationsSettings = null; @@ -63,7 +63,7 @@ public ActionRequestValidationException validate() { public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); taskParams.writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_ADAPTIVE_ALLOCATIONS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeOptionalWriteable(adaptiveAllocationsSettings); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java index b298d486c9e0..1bf92262b30f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java @@ -169,7 +169,7 @@ public Request(StreamInput in) throws IOException { modelId = in.readString(); timeout = in.readTimeValue(); waitForState = in.readEnum(AllocationStatus.State.class); - if (in.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_ADAPTIVE_ALLOCATIONS)) { + if 
(in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { numberOfAllocations = in.readOptionalVInt(); } else { numberOfAllocations = in.readVInt(); @@ -189,7 +189,7 @@ public Request(StreamInput in) throws IOException { } else { this.deploymentId = modelId; } - if (in.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_ADAPTIVE_ALLOCATIONS)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { this.adaptiveAllocationsSettings = in.readOptionalWriteable(AdaptiveAllocationsSettings::new); } else { this.adaptiveAllocationsSettings = null; @@ -297,7 +297,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(modelId); out.writeTimeValue(timeout); out.writeEnum(waitForState); - if (out.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_ADAPTIVE_ALLOCATIONS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeOptionalVInt(numberOfAllocations); } else { out.writeVInt(numberOfAllocations); @@ -313,7 +313,7 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) { out.writeString(deploymentId); } - if (out.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_ADAPTIVE_ALLOCATIONS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeOptionalWriteable(adaptiveAllocationsSettings); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelDeploymentAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelDeploymentAction.java index cb578fdb157d..2018c9526ec8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelDeploymentAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelDeploymentAction.java @@ -87,7 +87,7 @@ public Request(String deploymentId) { public Request(StreamInput in) throws IOException { super(in); deploymentId = in.readString(); - if (in.getTransportVersion().before(TransportVersions.INFERENCE_ADAPTIVE_ALLOCATIONS)) { + if (in.getTransportVersion().before(TransportVersions.V_8_16_0)) { numberOfAllocations = in.readVInt(); adaptiveAllocationsSettings = null; isInternal = false; @@ -134,7 +134,7 @@ public AdaptiveAllocationsSettings getAdaptiveAllocationsSettings() { public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(deploymentId); - if (out.getTransportVersion().before(TransportVersions.INFERENCE_ADAPTIVE_ALLOCATIONS)) { + if (out.getTransportVersion().before(TransportVersions.V_8_16_0)) { out.writeVInt(numberOfAllocations); } else { out.writeOptionalVInt(numberOfAllocations); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/calendars/ScheduledEvent.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/calendars/ScheduledEvent.java index b007c1da451f..742daa1bf613 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/calendars/ScheduledEvent.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/calendars/ScheduledEvent.java @@ -115,7 +115,7 @@ public ScheduledEvent(StreamInput in) throws IOException { description = in.readString(); startTime = in.readInstant(); endTime = in.readInstant(); - if (in.getTransportVersion().onOrAfter(TransportVersions.ML_SCHEDULED_EVENT_TIME_SHIFT_CONFIGURATION)) { + if 
(in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { skipResult = in.readBoolean(); skipModelUpdate = in.readBoolean(); forceTimeShift = in.readOptionalInt(); @@ -204,7 +204,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(description); out.writeInstant(startTime); out.writeInstant(endTime); - if (out.getTransportVersion().onOrAfter(TransportVersions.ML_SCHEDULED_EVENT_TIME_SHIFT_CONFIGURATION)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeBoolean(skipResult); out.writeBoolean(skipModelUpdate); out.writeOptionalInt(forceTimeShift); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/AssignmentStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/AssignmentStats.java index 858d97bf6f95..31b513eea161 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/AssignmentStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/AssignmentStats.java @@ -483,7 +483,7 @@ public AssignmentStats(StreamInput in) throws IOException { } else { deploymentId = modelId; } - if (in.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_ADAPTIVE_ALLOCATIONS)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { adaptiveAllocationsSettings = in.readOptionalWriteable(AdaptiveAllocationsSettings::new); } else { adaptiveAllocationsSettings = null; @@ -666,7 +666,7 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) { out.writeString(deploymentId); } - if (out.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_ADAPTIVE_ALLOCATIONS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeOptionalWriteable(adaptiveAllocationsSettings); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignment.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignment.java index efd07cceae09..249e27d6f25e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignment.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignment.java @@ -178,7 +178,7 @@ public TrainedModelAssignment(StreamInput in) throws IOException { } else { this.maxAssignedAllocations = totalCurrentAllocations(); } - if (in.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_ADAPTIVE_ALLOCATIONS)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { this.adaptiveAllocationsSettings = in.readOptionalWriteable(AdaptiveAllocationsSettings::new); } else { this.adaptiveAllocationsSettings = null; @@ -382,7 +382,7 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_4_0)) { out.writeVInt(maxAssignedAllocations); } - if (out.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_ADAPTIVE_ALLOCATIONS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeOptionalWriteable(adaptiveAllocationsSettings); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearningToRankConfig.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearningToRankConfig.java index 9929e59a9c80..a4d7c9c7fa08 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearningToRankConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearningToRankConfig.java @@ -41,7 +41,6 @@ public class LearningToRankConfig extends RegressionConfig implements Rewriteable { public static final ParseField NAME = new ParseField("learning_to_rank"); - static final TransportVersion MIN_SUPPORTED_TRANSPORT_VERSION = TransportVersions.LTR_SERVERLESS_RELEASE; public static final ParseField NUM_TOP_FEATURE_IMPORTANCE_VALUES = new ParseField("num_top_feature_importance_values"); public static final ParseField FEATURE_EXTRACTORS = new ParseField("feature_extractors"); public static final ParseField DEFAULT_PARAMS = new ParseField("default_params"); @@ -226,7 +225,7 @@ public MlConfigVersion getMinimalSupportedMlConfigVersion() { @Override public TransportVersion getMinimalSupportedTransportVersion() { - return MIN_SUPPORTED_TRANSPORT_VERSION; + return TransportVersions.V_8_16_0; } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/DetectionRule.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/DetectionRule.java index eb952a7dc7e5..4bdced325311 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/DetectionRule.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/DetectionRule.java @@ -68,7 +68,7 @@ public DetectionRule(StreamInput in) throws IOException { actions = in.readEnumSet(RuleAction.class); scope = new RuleScope(in); conditions = in.readCollectionAsList(RuleCondition::new); - if (in.getTransportVersion().onOrAfter(TransportVersions.ML_ADD_DETECTION_RULE_PARAMS)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { params = new RuleParams(in); } else { params = new RuleParams(); @@ -80,7 +80,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeEnumSet(actions); scope.writeTo(out); out.writeCollection(conditions); - if (out.getTransportVersion().onOrAfter(TransportVersions.ML_ADD_DETECTION_RULE_PARAMS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { params.writeTo(out); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/DocumentPermissions.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/DocumentPermissions.java index 14ecf4cb0d6e..24f0a5243620 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/DocumentPermissions.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/DocumentPermissions.java @@ -160,10 +160,8 @@ private static void buildRoleQuery( failIfQueryUsesClient(queryBuilder, context); Query roleQuery = context.toQuery(queryBuilder).query(); filter.add(roleQuery, SHOULD); - NestedLookup nestedLookup = context.nestedLookup(); - if (nestedLookup != NestedLookup.EMPTY) { - NestedHelper nestedHelper = new NestedHelper(nestedLookup, context::isFieldMapped); - if (nestedHelper.mightMatchNestedDocs(roleQuery)) { + if (context.nestedLookup() != NestedLookup.EMPTY) { + if (NestedHelper.mightMatchNestedDocs(roleQuery, context)) { roleQuery = new 
BooleanQuery.Builder().add(roleQuery, FILTER) .add(Queries.newNonNestedFilter(context.indexVersionCreated()), FILTER) .build(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConfigurableClusterPrivileges.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConfigurableClusterPrivileges.java index b93aa079a28d..148fdf21fd2d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConfigurableClusterPrivileges.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConfigurableClusterPrivileges.java @@ -82,7 +82,7 @@ public static ConfigurableClusterPrivilege[] readArray(StreamInput in) throws IO * Utility method to write an array of {@link ConfigurableClusterPrivilege} objects to a {@link StreamOutput} */ public static void writeArray(StreamOutput out, ConfigurableClusterPrivilege[] privileges) throws IOException { - if (out.getTransportVersion().onOrAfter(TransportVersions.ADD_MANAGE_ROLES_PRIVILEGE)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeArray(WRITER, privileges); } else { out.writeArray( diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/MachineLearningFeatureSetUsageTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/MachineLearningFeatureSetUsageTests.java index 87d658c6f983..e9ec8dfe8ee5 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/MachineLearningFeatureSetUsageTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/MachineLearningFeatureSetUsageTests.java @@ -57,7 +57,7 @@ protected MachineLearningFeatureSetUsage mutateInstance(MachineLearningFeatureSe @Override protected MachineLearningFeatureSetUsage mutateInstanceForVersion(MachineLearningFeatureSetUsage instance, TransportVersion version) { - if (version.before(TransportVersions.ML_TELEMETRY_MEMORY_ADDED)) { + if (version.before(TransportVersions.V_8_16_0)) { return new MachineLearningFeatureSetUsage( instance.available(), instance.enabled(), diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesetListItem.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesetListItem.java index 3a61c848d381..d694b2681ee8 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesetListItem.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesetListItem.java @@ -68,8 +68,7 @@ public QueryRulesetListItem(StreamInput in) throws IOException { this.criteriaTypeToCountMap = Map.of(); } TransportVersion streamTransportVersion = in.getTransportVersion(); - if (streamTransportVersion.isPatchFrom(TransportVersions.QUERY_RULES_LIST_INCLUDES_TYPES_BACKPORT_8_15) - || streamTransportVersion.isPatchFrom(TransportVersions.QUERY_RULES_LIST_INCLUDES_TYPES_BACKPORT_8_16) + if (streamTransportVersion.isPatchFrom(TransportVersions.QUERY_RULES_LIST_INCLUDES_TYPES_BACKPORT_8_16) || streamTransportVersion.onOrAfter(TransportVersions.QUERY_RULES_LIST_INCLUDES_TYPES)) { this.ruleTypeToCountMap = in.readMap(m -> in.readEnum(QueryRule.QueryRuleType.class), StreamInput::readInt); } else { @@ -104,8 +103,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeMap(criteriaTypeToCountMap, StreamOutput::writeEnum, StreamOutput::writeInt); } 
TransportVersion streamTransportVersion = out.getTransportVersion(); - if (streamTransportVersion.isPatchFrom(TransportVersions.QUERY_RULES_LIST_INCLUDES_TYPES_BACKPORT_8_15) - || streamTransportVersion.isPatchFrom(TransportVersions.QUERY_RULES_LIST_INCLUDES_TYPES_BACKPORT_8_16) + if (streamTransportVersion.isPatchFrom(TransportVersions.QUERY_RULES_LIST_INCLUDES_TYPES_BACKPORT_8_16) || streamTransportVersion.onOrAfter(TransportVersions.QUERY_RULES_LIST_INCLUDES_TYPES)) { out.writeMap(ruleTypeToCountMap, StreamOutput::writeEnum, StreamOutput::writeInt); } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/retriever/QueryRuleRetrieverBuilder.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/retriever/QueryRuleRetrieverBuilder.java index 54a89d061de3..5b27cc7a3e05 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/retriever/QueryRuleRetrieverBuilder.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/retriever/QueryRuleRetrieverBuilder.java @@ -110,12 +110,14 @@ public QueryRuleRetrieverBuilder( Map matchCriteria, List retrieverSource, int rankWindowSize, - String retrieverName + String retrieverName, + List preFilterQueryBuilders ) { super(retrieverSource, rankWindowSize); this.rulesetIds = rulesetIds; this.matchCriteria = matchCriteria; this.retrieverName = retrieverName; + this.preFilterQueryBuilders = preFilterQueryBuilders; } @Override @@ -156,8 +158,15 @@ public void doToXContent(XContentBuilder builder, Params params) throws IOExcept } @Override - protected QueryRuleRetrieverBuilder clone(List newChildRetrievers) { - return new QueryRuleRetrieverBuilder(rulesetIds, matchCriteria, newChildRetrievers, rankWindowSize, retrieverName); + protected QueryRuleRetrieverBuilder clone(List newChildRetrievers, List newPreFilterQueryBuilders) { + return new QueryRuleRetrieverBuilder( + rulesetIds, + matchCriteria, + newChildRetrievers, + rankWindowSize, + retrieverName, + newPreFilterQueryBuilders + ); } @Override diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/ListQueryRulesetsActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/ListQueryRulesetsActionResponseBWCSerializingTests.java index 27d5e240534b..c822dd123d3f 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/ListQueryRulesetsActionResponseBWCSerializingTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/ListQueryRulesetsActionResponseBWCSerializingTests.java @@ -59,8 +59,7 @@ protected ListQueryRulesetsAction.Response mutateInstanceForVersion( ListQueryRulesetsAction.Response instance, TransportVersion version ) { - if (version.isPatchFrom(TransportVersions.QUERY_RULES_LIST_INCLUDES_TYPES_BACKPORT_8_15) - || version.isPatchFrom(TransportVersions.QUERY_RULES_LIST_INCLUDES_TYPES_BACKPORT_8_16) + if (version.isPatchFrom(TransportVersions.QUERY_RULES_LIST_INCLUDES_TYPES_BACKPORT_8_16) || version.onOrAfter(TransportVersions.QUERY_RULES_LIST_INCLUDES_TYPES)) { return instance; } else if (version.onOrAfter(QueryRulesetListItem.EXPANDED_RULESET_COUNT_TRANSPORT_VERSION)) { diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/TestQueryRulesetActionRequestBWCSerializingTests.java 
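Both TestCompoundRetrieverBuilder and QueryRuleRetrieverBuilder now thread preFilterQueryBuilders through their constructors and clone overrides, so pre-filters rewritten alongside the child retrievers survive the rewrite loop. A simplified, self-contained model of that contract; none of these types are the real retriever API:

import java.util.List;

final class CompoundCloneSketch {
    record Retriever(String name) {}
    record Filter(String query) {}

    static final class Compound {
        final List<Retriever> children;
        final List<Filter> preFilters;
        final int rankWindowSize;

        Compound(List<Retriever> children, int rankWindowSize, List<Filter> preFilters) {
            this.children = children;
            this.rankWindowSize = rankWindowSize;
            this.preFilters = preFilters;
        }

        // Mirrors the new clone(newChildRetrievers, newPreFilterQueryBuilders)
        // signature: everything the rewrite phase can change is passed back in,
        // while fixed configuration such as the rank window size is copied.
        Compound cloneWith(List<Retriever> newChildren, List<Filter> newPreFilters) {
            return new Compound(newChildren, rankWindowSize, newPreFilters);
        }
    }
}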
b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/TestQueryRulesetActionRequestBWCSerializingTests.java index 7041de1106b5..8582ee1bd8d2 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/TestQueryRulesetActionRequestBWCSerializingTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/TestQueryRulesetActionRequestBWCSerializingTests.java @@ -51,6 +51,6 @@ protected TestQueryRulesetAction.Request mutateInstanceForVersion(TestQueryRules @Override protected List bwcVersions() { - return getAllBWCVersions().stream().filter(v -> v.onOrAfter(TransportVersions.QUERY_RULE_TEST_API)).collect(Collectors.toList()); + return getAllBWCVersions().stream().filter(v -> v.onOrAfter(TransportVersions.V_8_16_0)).collect(Collectors.toList()); } } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/TestQueryRulesetActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/TestQueryRulesetActionResponseBWCSerializingTests.java index a6562fb7b52a..142310ac4033 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/TestQueryRulesetActionResponseBWCSerializingTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/TestQueryRulesetActionResponseBWCSerializingTests.java @@ -47,6 +47,6 @@ protected TestQueryRulesetAction.Response mutateInstanceForVersion(TestQueryRule @Override protected List bwcVersions() { - return getAllBWCVersions().stream().filter(v -> v.onOrAfter(TransportVersions.QUERY_RULE_TEST_API)).collect(Collectors.toList()); + return getAllBWCVersions().stream().filter(v -> v.onOrAfter(TransportVersions.V_8_16_0)).collect(Collectors.toList()); } } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java index 7836522c7713..468d076c1b7e 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java @@ -107,9 +107,7 @@ protected Query asQuery(Not not, TranslatorHandler handler) { } public static Query doTranslate(Not not, TranslatorHandler handler) { - Query wrappedQuery = handler.asQuery(not.field()); - Query q = wrappedQuery.negate(not.source()); - return q; + return handler.asQuery(not.field()).negate(not.source()); } } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java index a63571093ba5..d86cdb0de038 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java @@ -32,6 +32,113 @@ import static org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; +/** + * This enum represents data types the ES|QL query processing layer is able to + * interact with in some way. 
This includes fully representable types (e.g. + * {@link DataType#LONG}, numeric types which we promote (e.g. {@link DataType#SHORT}) + * or fold into other types (e.g. {@link DataType#DATE_PERIOD}) early in the + * processing pipeline, types for internal use + * cases (e.g. {@link DataType#PARTIAL_AGG}), and types which the language + * doesn't support, but require special handling anyway (e.g. + * {@link DataType#OBJECT}) + * + *

Process for adding a new data type

+ * Note: it is not expected that all the following steps be done in a single PR. + * Use capabilities to gate tests as you go, and use as many PRs as you think + * appropriate. New data types are complex, and smaller PRs will make reviews + * easier. + *
    + *
+ * <li>Create a new feature flag for the type in {@link EsqlCorePlugin}. We
+ * recommend developing the data type over a series of smaller PRs behind
+ * a feature flag, even for relatively simple data types.</li>
+ * <li>Add a capability to EsqlCapabilities related to the new type, gated
+ * by the feature flag you just created. Again, using the feature flag is
+ * preferred over snapshot-only. As development progresses, you may need to
+ * add more capabilities related to the new type, e.g. for supporting
+ * specific functions. This is fine and expected. (A sketch of this
+ * flag-and-capability gating pattern follows this list.)</li>
+ * <li>Create a new CSV test file for the new type. You'll either need to
+ * create a new data file as well, or add values of the new type to an
+ * existing data file. See CsvTestDataLoader for creating a new data
+ * set.</li>
+ * <li>In the new CSV test file, start adding basic functionality tests.
+ * These should include reading and returning values, both from indexed data
+ * and from the ROW command. They should also include functions that support
+ * "every" type, such as Case or MvFirst.</li>
+ * <li>Add the new type to the CsvTestUtils#Type enum, if it isn't already
+ * there. You also need to modify CsvAssert to support reading values
+ * of the new type.</li>
+ * <li>At this point, the CSV tests should fail with a sensible ES|QL error
+ * message. Make sure they're failing in ES|QL, not in the test
+ * framework.</li>
+ * <li>Add the new data type to this enum. This will cause a bunch of
+ * compile errors for switch statements throughout the code. Resolve those
+ * as appropriate. That is the main way in which the new type will be tied
+ * into the framework.</li>
+ * <li>Add the new type to the {@link DataType#UNDER_CONSTRUCTION}
+ * collection. This is used by the test framework to disable some checks
+ * around how functions report their supported types, which would otherwise
+ * generate a lot of noise while the type is still in development.</li>
+ * <li>Add typed data generators to TestCaseSupplier, and make sure all
+ * functions that support the new type have tests for it.</li>
+ * <li>Work to support things all types should do. Equality and the
+ * "typeless" MV functions (MvFirst, MvLast, and MvCount) should work for
+ * most types. Case and Coalesce should also support all types.
+ * If the type has a natural ordering, make sure to test
+ * sorting and the other binary comparisons. Make sure these functions all
+ * have CSV tests that run against indexed data.</li>
+ * <li>Add conversion functions as appropriate. Almost all types should
+ * support ToString, and should have a "ToType" function that accepts a
+ * string. There may be other logical conversions depending on the nature
+ * of the type. Make sure to add the conversion function to the
+ * TYPE_TO_CONVERSION_FUNCTION map in EsqlDataTypeConverter, and make sure
+ * the conversion functions have CSV tests that run against indexed data.</li>
+ * <li>Support the new type in aggregations that are type independent.
+ * This includes Values, Count, and Count Distinct. Make sure there are
+ * CSV tests against indexed data for these.</li>
+ * <li>Support other functions and aggregations as appropriate, making sure
+ * to include CSV tests.</li>
+ * <li>Consider how the type will interact with other types. For example,
+ * if the new type is numeric, it may be good for it to be comparable with
+ * other numbers. Supporting this may require new logic in
+ * EsqlDataTypeConverter#commonType, individual function type checking, the
+ * verifier rules, or other places. We suggest starting with CSV tests and
+ * seeing where they fail.</li>
+ * </ul>
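+ * <p>
+ * A minimal sketch of the flag-and-capability gating described above, using
+ * a hypothetical "dense_vector" type. The Cap(FeatureFlag) constructor is
+ * assumed to match how existing under-construction types are wired up;
+ * verify against the current EsqlCorePlugin and EsqlCapabilities before
+ * copying.
+ * <pre>{@code
+ * // In EsqlCorePlugin: feature flags are enabled by default in snapshot
+ * // builds, keeping the in-development type out of release builds.
+ * public static final FeatureFlag DENSE_VECTOR_FEATURE_FLAG = new FeatureFlag("esql_dense_vector");
+ *
+ * // In EsqlCapabilities.Cap: gate the capability on the flag rather than
+ * // declaring it snapshot-only.
+ * DENSE_VECTOR_TYPE(EsqlCorePlugin.DENSE_VECTOR_FEATURE_FLAG);
+ * }</pre>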
+ * <p>
+ * There are some additional steps that should be taken when removing the
+ * feature flag and getting ready for a release:
+ * <ul>
+ * <li>Ensure the capabilities for this type are always enabled.</li>
+ * <li>Remove the type from the {@link DataType#UNDER_CONSTRUCTION}
+ * collection (see the sketch after this list).</li>
+ * <li>Fix new test failures related to declared function types.</li>
+ * <li>Make sure to run the full test suite locally via gradle to generate
+ * the function type tables and helper files with the new type. Ensure all
+ * the functions that support the type have appropriate docs for it.</li>
+ * <li>If appropriate, remove the type from the ESQL limitations list of
+ * unsupported types.</li>
+ * </ul>
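+ * <p>
+ * A sketch of that release-time cleanup, again with the hypothetical
+ * "dense_vector" names and assuming {@link DataType#UNDER_CONSTRUCTION}
+ * maps each in-development type to its feature flag, as the steps above
+ * describe:
+ * <pre>{@code
+ * // Before: the type is registered as under construction.
+ * public static final Map<DataType, FeatureFlag> UNDER_CONSTRUCTION = Map.ofEntries(
+ *     Map.entry(DENSE_VECTOR, EsqlCorePlugin.DENSE_VECTOR_FEATURE_FLAG)
+ * );
+ *
+ * // After: drop the map entry, delete the flag, and declare the capability
+ * // without one so it is always enabled.
+ * DENSE_VECTOR_TYPE;
+ * }</pre>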
+ */ public enum DataType { /** * Fields of this type are unsupported by any functions and are always diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/EsField.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/EsField.java index 47dadcbb11de..73e2d5ec626a 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/EsField.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/EsField.java @@ -72,7 +72,7 @@ public EsField(StreamInput in) throws IOException { private DataType readDataType(StreamInput in) throws IOException { String name = readCachedStringWithVersionCheck(in); - if (in.getTransportVersion().before(TransportVersions.ESQL_NESTED_UNSUPPORTED) && name.equalsIgnoreCase("NESTED")) { + if (in.getTransportVersion().before(TransportVersions.V_8_16_0) && name.equalsIgnoreCase("NESTED")) { /* * The "nested" data type existed in older versions of ESQL but was * entirely used to filter mappings away. Those versions will still diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/CollectionUtils.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/CollectionUtils.java index 8bfcf4ca5c40..ce0540687121 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/CollectionUtils.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/CollectionUtils.java @@ -30,12 +30,8 @@ public static List combine(List left, List righ } List list = new ArrayList<>(left.size() + right.size()); - if (left.isEmpty() == false) { - list.addAll(left); - } - if (right.isEmpty() == false) { - list.addAll(right); - } + list.addAll(left); + list.addAll(right); return list; } @@ -73,13 +69,6 @@ public static List combine(Collection left, T... entries) { return list; } - public static int mapSize(int size) { - if (size < 2) { - return size + 1; - } - return (int) (size / 0.75f + 1f); - } - @SafeVarargs @SuppressWarnings("varargs") public static List nullSafeList(T... 
entries) { diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamInput.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamInput.java index e8ccae342900..b570a50535a5 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamInput.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamInput.java @@ -52,7 +52,7 @@ public interface PlanStreamInput { String readCachedString() throws IOException; static String readCachedStringWithVersionCheck(StreamInput planStreamInput) throws IOException { - if (planStreamInput.getTransportVersion().before(TransportVersions.ESQL_CACHED_STRING_SERIALIZATION)) { + if (planStreamInput.getTransportVersion().before(TransportVersions.V_8_16_0)) { return planStreamInput.readString(); } return ((PlanStreamInput) planStreamInput).readCachedString(); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamOutput.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamOutput.java index fb4af33d2fd6..a5afcb5fa29a 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamOutput.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamOutput.java @@ -37,7 +37,7 @@ public interface PlanStreamOutput { void writeCachedString(String field) throws IOException; static void writeCachedStringWithVersionCheck(StreamOutput planStreamOutput, String string) throws IOException { - if (planStreamOutput.getTransportVersion().before(TransportVersions.ESQL_CACHED_STRING_SERIALIZATION)) { + if (planStreamOutput.getTransportVersion().before(TransportVersions.V_8_16_0)) { planStreamOutput.writeString(string); } else { ((PlanStreamOutput) planStreamOutput).writeCachedString(string); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AggregationOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AggregationOperator.java index 9338077a5557..f57f450c7ee3 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AggregationOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AggregationOperator.java @@ -219,7 +219,7 @@ public Status(long aggregationNanos, long aggregationFinishNanos, int pagesProce protected Status(StreamInput in) throws IOException { aggregationNanos = in.readVLong(); - if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_AGGREGATION_OPERATOR_STATUS_FINISH_NANOS)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { aggregationFinishNanos = in.readOptionalVLong(); } else { aggregationFinishNanos = null; @@ -230,7 +230,7 @@ protected Status(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { out.writeVLong(aggregationNanos); - if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_AGGREGATION_OPERATOR_STATUS_FINISH_NANOS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeOptionalVLong(aggregationFinishNanos); } out.writeVInt(pagesProcessed); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java index d98613f1817a..c071b5055df7 
100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java @@ -79,7 +79,7 @@ public DriverProfile( } public DriverProfile(StreamInput in) throws IOException { - if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_SLEEPS)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { this.startMillis = in.readVLong(); this.stopMillis = in.readVLong(); } else { @@ -101,7 +101,7 @@ public DriverProfile(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE_SLEEPS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeVLong(startMillis); out.writeVLong(stopMillis); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverSleeps.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverSleeps.java index 01e9a73c4fb5..d8856ebedb80 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverSleeps.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverSleeps.java @@ -76,7 +76,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws static final int RECORDS = 10; public static DriverSleeps read(StreamInput in) throws IOException { - if (in.getTransportVersion().before(TransportVersions.ESQL_PROFILE_SLEEPS)) { + if (in.getTransportVersion().before(TransportVersions.V_8_16_0)) { return empty(); } return new DriverSleeps( @@ -88,7 +88,7 @@ public static DriverSleeps read(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().before(TransportVersions.ESQL_PROFILE_SLEEPS)) { + if (out.getTransportVersion().before(TransportVersions.V_8_16_0)) { return; } out.writeMap(counts, StreamOutput::writeVLong); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec index 712cadf6d44f..584cde55080e 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec @@ -120,6 +120,19 @@ left:keyword | client_ip:keyword | right:keyword | env:keyword left | 172.21.0.5 | right | Development ; +lookupIPFromRowWithShadowingKeepReordered +required_capability: join_lookup_v4 + +ROW left = "left", client_ip = "172.21.0.5", env = "env", right = "right" +| EVAL client_ip = client_ip::keyword +| LOOKUP JOIN clientips_lookup ON client_ip +| KEEP right, env, client_ip +; + +right:keyword | env:keyword | client_ip:keyword +right | Development | 172.21.0.5 +; + lookupIPFromIndex required_capability: join_lookup_v4 @@ -263,6 +276,24 @@ ignoreOrder:true; 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233 | Connected to 10.1.0.3 | Success ; +lookupMessageFromIndexKeepReordered +required_capability: join_lookup_v4 + +FROM sample_data +| LOOKUP JOIN message_types_lookup ON message +| KEEP type, client_ip, event_duration, message +; + +type:keyword | client_ip:ip | event_duration:long | message:keyword +Success | 172.21.3.15 | 1756467 | Connected to 10.1.0.1 +Error | 172.21.3.15 | 5033755 | Connection error +Error | 172.21.3.15 | 8268153 | Connection 
error +Error | 172.21.3.15 | 725448 | Connection error +Disconnected | 172.21.0.5 | 1232382 | Disconnected +Success | 172.21.2.113 | 2764889 | Connected to 10.1.0.2 +Success | 172.21.2.162 | 3450233 | Connected to 10.1.0.3 +; + lookupMessageFromIndexStats required_capability: join_lookup_v4 diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java index 147b13b36c44..00f53d31165b 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java @@ -18,6 +18,7 @@ import org.elasticsearch.client.internal.ClusterAdminClient; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; @@ -1648,6 +1649,44 @@ public void testMaxTruncationSizeSetting() { } } + public void testScriptField() throws Exception { + XContentBuilder mapping = JsonXContent.contentBuilder(); + mapping.startObject(); + { + mapping.startObject("runtime"); + { + mapping.startObject("k1"); + mapping.field("type", "long"); + mapping.endObject(); + mapping.startObject("k2"); + mapping.field("type", "long"); + mapping.endObject(); + } + mapping.endObject(); + { + mapping.startObject("properties"); + mapping.startObject("meter").field("type", "double").endObject(); + mapping.endObject(); + } + } + mapping.endObject(); + String sourceMode = randomBoolean() ? "stored" : "synthetic"; + Settings.Builder settings = indexSettings(1, 0).put(indexSettings()).put("index.mapping.source.mode", sourceMode); + client().admin().indices().prepareCreate("test-script").setMapping(mapping).setSettings(settings).get(); + for (int i = 0; i < 10; i++) { + index("test-script", Integer.toString(i), Map.of("k1", i, "k2", "b-" + i, "meter", 10000 * i)); + } + refresh("test-script"); + try (EsqlQueryResponse resp = run("FROM test-script | SORT k1 | LIMIT 10")) { + List k1Column = Iterators.toList(resp.column(0)); + assertThat(k1Column, contains(0L, 1L, 2L, 3L, 4L, 5L, 6L, 7L, 8L, 9L)); + List k2Column = Iterators.toList(resp.column(1)); + assertThat(k2Column, contains(null, null, null, null, null, null, null, null, null, null)); + List meterColumn = Iterators.toList(resp.column(2)); + assertThat(meterColumn, contains(0.0, 10000.0, 20000.0, 30000.0, 40000.0, 50000.0, 60000.0, 70000.0, 80000.0, 90000.0)); + } + } + private void clearPersistentSettings(Setting... 
settings) { Settings.Builder clearedSettings = Settings.builder(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java index ba7a7e826684..52170dfb0525 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java @@ -107,7 +107,7 @@ public EsqlExecutionInfo(StreamInput in) throws IOException { clusterList.forEach(c -> m.put(c.getClusterAlias(), c)); this.clusterInfo = m; } - if (in.getTransportVersion().onOrAfter(TransportVersions.OPT_IN_ESQL_CCS_EXECUTION_INFO)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { this.includeCCSMetadata = in.readBoolean(); } else { this.includeCCSMetadata = false; @@ -124,7 +124,7 @@ public void writeTo(StreamOutput out) throws IOException { } else { out.writeCollection(Collections.emptyList()); } - if (out.getTransportVersion().onOrAfter(TransportVersions.OPT_IN_ESQL_CCS_EXECUTION_INFO)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeBoolean(includeCCSMetadata); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java index 77aed298baea..dc0e9fd1fb06 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java @@ -113,7 +113,7 @@ static EsqlQueryResponse deserialize(BlockStreamInput in) throws IOException { } boolean columnar = in.readBoolean(); EsqlExecutionInfo executionInfo = null; - if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_CCS_EXECUTION_INFO)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { executionInfo = in.readOptionalWriteable(EsqlExecutionInfo::new); } return new EsqlQueryResponse(columns, pages, profile, columnar, asyncExecutionId, isRunning, isAsync, executionInfo); @@ -132,7 +132,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalWriteable(profile); } out.writeBoolean(columnar); - if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_CCS_EXECUTION_INFO)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeOptionalWriteable(executionInfo); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlResolveFieldsAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlResolveFieldsAction.java index f7e6793fc4fb..f7fd991a9ef1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlResolveFieldsAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlResolveFieldsAction.java @@ -58,7 +58,7 @@ void executeRemoteRequest( ActionListener remoteListener ) { remoteClient.getConnection(remoteRequest, remoteListener.delegateFailure((l, conn) -> { - var remoteAction = conn.getTransportVersion().onOrAfter(TransportVersions.ESQL_ORIGINAL_INDICES) + var remoteAction = conn.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0) ? 
RESOLVE_REMOTE_TYPE : TransportFieldCapabilitiesAction.REMOTE_TYPE; remoteClient.execute(conn, remoteAction, remoteRequest, l); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index b847508d2b16..cf91c7df9a03 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -633,9 +633,10 @@ private Join resolveLookupJoin(LookupJoin join) { config = new JoinConfig(coreJoin, leftKeys, leftKeys, rightKeys); join = new LookupJoin(join.source(), join.left(), join.right(), config); - } - // everything else is unsupported for now - else { + } else if (type != JoinTypes.LEFT) { + // everything else is unsupported for now + // LEFT can only happen by being mapped from a USING above. So we need to exclude this as well because this rule can be run + // more than once. UnresolvedAttribute errorAttribute = new UnresolvedAttribute(join.source(), "unsupported", "Unsupported join type"); // add error message return join.withConfig(new JoinConfig(type, singletonList(errorAttribute), emptyList(), emptyList())); @@ -651,7 +652,7 @@ private List resolveUsingColumns(List cols, List"), enrichResolution); + this( + configuration, + functionRegistry, + indexResolution, + IndexResolution.invalid("AnalyzerContext constructed without any lookup join resolution"), + enrichResolution + ); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java index 49d8a5ee8caa..f5fd82d742bc 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.esql.analysis; -import org.elasticsearch.index.IndexMode; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.elasticsearch.xpack.esql.common.Failure; @@ -55,7 +54,8 @@ import org.elasticsearch.xpack.esql.plan.logical.RegexExtract; import org.elasticsearch.xpack.esql.plan.logical.Row; import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan; -import org.elasticsearch.xpack.esql.plan.logical.join.LookupJoin; +import org.elasticsearch.xpack.esql.plan.logical.join.Join; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinConfig; import org.elasticsearch.xpack.esql.stats.FeatureMetric; import org.elasticsearch.xpack.esql.stats.Metrics; @@ -172,20 +172,6 @@ else if (p instanceof Lookup lookup) { else { lookup.matchFields().forEach(unresolvedExpressions); } - } else if (p instanceof LookupJoin lj) { - // expect right side to always be a lookup index - lj.right().forEachUp(EsRelation.class, r -> { - if (r.indexMode() != IndexMode.LOOKUP) { - failures.add( - fail( - r, - "LOOKUP JOIN right side [{}] must be a lookup index (index_mode=lookup, not [{}]", - r.index().name(), - r.indexMode().getName() - ) - ); - } - }); } else { @@ -217,6 +203,7 @@ else if (p instanceof Lookup lookup) { checkSort(p, failures); checkFullTextQueryFunctions(p, failures); + checkJoin(p, failures); }); checkRemoteEnrich(plan, failures); checkMetadataScoreNameReserved(plan, failures); @@ -791,6 +778,35 @@ private static void checkNotPresentInDisjunctions( }); } + /** + * Checks 
Joins for invalid usage. + * + * @param plan root plan to check + * @param failures failures found + */ + private static void checkJoin(LogicalPlan plan, Set failures) { + if (plan instanceof Join join) { + JoinConfig config = join.config(); + for (int i = 0; i < config.leftFields().size(); i++) { + Attribute leftField = config.leftFields().get(i); + Attribute rightField = config.rightFields().get(i); + if (leftField.dataType() != rightField.dataType()) { + failures.add( + fail( + leftField, + "JOIN left field [{}] of type [{}] is incompatible with right field [{}] of type [{}]", + leftField.name(), + leftField.dataType(), + rightField.name(), + rightField.dataType() + ) + ); + } + } + + } + } + /** * Checks full text query functions for invalid usage. * diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/ResolvedEnrichPolicy.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/ResolvedEnrichPolicy.java index e891089aa55b..64595e776a96 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/ResolvedEnrichPolicy.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/ResolvedEnrichPolicy.java @@ -35,8 +35,7 @@ public ResolvedEnrichPolicy(StreamInput in) throws IOException { } private static Reader getEsFieldReader(StreamInput in) { - if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_ES_FIELD_CACHED_SERIALIZATION) - || in.getTransportVersion().isPatchFrom(TransportVersions.V_8_15_2)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_2)) { return EsField::readFrom; } return EsField::new; @@ -56,8 +55,7 @@ public void writeTo(StreamOutput out) throws IOException { */ (o, v) -> { var field = new EsField(v.getName(), v.getDataType(), v.getProperties(), v.isAggregatable(), v.isAlias()); - if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_ES_FIELD_CACHED_SERIALIZATION) - || out.getTransportVersion().isPatchFrom(TransportVersions.V_8_15_2)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_2)) { field.writeTo(o); } else { field.writeContent(o); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnsupportedAttribute.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnsupportedAttribute.java index d372eddb961a..089f6db373c5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnsupportedAttribute.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnsupportedAttribute.java @@ -81,8 +81,7 @@ private UnsupportedAttribute(StreamInput in) throws IOException { this( Source.readFrom((PlanStreamInput) in), readCachedStringWithVersionCheck(in), - in.getTransportVersion().onOrAfter(TransportVersions.ESQL_ES_FIELD_CACHED_SERIALIZATION) - || in.getTransportVersion().isPatchFrom(TransportVersions.V_8_15_2) ? EsField.readFrom(in) : new UnsupportedEsField(in), + in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_2) ? 
EsField.readFrom(in) : new UnsupportedEsField(in), in.readOptionalString(), NameId.readFrom((PlanStreamInput) in) ); @@ -93,8 +92,7 @@ public void writeTo(StreamOutput out) throws IOException { if (((PlanStreamOutput) out).writeAttributeCacheHeader(this)) { Source.EMPTY.writeTo(out); writeCachedStringWithVersionCheck(out, name()); - if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_ES_FIELD_CACHED_SERIALIZATION) - || out.getTransportVersion().isPatchFrom(TransportVersions.V_8_15_2)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_2)) { field().writeTo(out); } else { field().writeContent(out); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java index 87efccfc90ab..265b08de5556 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java @@ -53,10 +53,8 @@ protected AggregateFunction(StreamInput in) throws IOException { this( Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class), - in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PER_AGGREGATE_FILTER) - ? in.readNamedWriteable(Expression.class) - : Literal.TRUE, - in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PER_AGGREGATE_FILTER) + in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0) ? in.readNamedWriteable(Expression.class) : Literal.TRUE, + in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0) ? in.readNamedWriteableCollectionAsList(Expression.class) : emptyList() ); @@ -66,7 +64,7 @@ protected AggregateFunction(StreamInput in) throws IOException { public final void writeTo(StreamOutput out) throws IOException { Source.EMPTY.writeTo(out); out.writeNamedWriteable(field); - if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_PER_AGGREGATE_FILTER)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeNamedWriteable(filter); out.writeNamedWriteableCollection(parameters); } else { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java index 2e45b1c1fe08..7436db9e00dd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java @@ -147,10 +147,8 @@ private CountDistinct(StreamInput in) throws IOException { this( Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class), - in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PER_AGGREGATE_FILTER) - ? in.readNamedWriteable(Expression.class) - : Literal.TRUE, - in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PER_AGGREGATE_FILTER) + in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0) ? in.readNamedWriteable(Expression.class) : Literal.TRUE, + in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0) ? 
in.readNamedWriteableCollectionAsList(Expression.class) : nullSafeList(in.readOptionalNamedWriteable(Expression.class)) ); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/FromPartial.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/FromPartial.java index 0f9037a28d7d..a67b87c7617c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/FromPartial.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/FromPartial.java @@ -58,10 +58,8 @@ private FromPartial(StreamInput in) throws IOException { this( Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class), - in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PER_AGGREGATE_FILTER) - ? in.readNamedWriteable(Expression.class) - : Literal.TRUE, - in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PER_AGGREGATE_FILTER) + in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0) ? in.readNamedWriteable(Expression.class) : Literal.TRUE, + in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0) ? in.readNamedWriteableCollectionAsList(Expression.class).get(0) : in.readNamedWriteable(Expression.class) ); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java index febd9f28b229..0d57267da1e2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java @@ -92,10 +92,8 @@ private Percentile(StreamInput in) throws IOException { this( Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class), - in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PER_AGGREGATE_FILTER) - ? in.readNamedWriteable(Expression.class) - : Literal.TRUE, - in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PER_AGGREGATE_FILTER) + in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0) ? in.readNamedWriteable(Expression.class) : Literal.TRUE, + in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0) ? in.readNamedWriteableCollectionAsList(Expression.class).get(0) : in.readNamedWriteable(Expression.class) ); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java index b7b04658f8d5..87ac9b77a682 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java @@ -74,10 +74,8 @@ public Rate(StreamInput in) throws IOException { this( Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class), - in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PER_AGGREGATE_FILTER) - ? in.readNamedWriteable(Expression.class) - : Literal.TRUE, - in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PER_AGGREGATE_FILTER) + in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0) ? in.readNamedWriteable(Expression.class) : Literal.TRUE, + in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0) ? 
in.readNamedWriteableCollectionAsList(Expression.class) : nullSafeList(in.readNamedWriteable(Expression.class), in.readOptionalNamedWriteable(Expression.class)) ); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ToPartial.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ToPartial.java index cffac616b3c8..a2856f60e4c5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ToPartial.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ToPartial.java @@ -80,10 +80,8 @@ private ToPartial(StreamInput in) throws IOException { this( Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class), - in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PER_AGGREGATE_FILTER) - ? in.readNamedWriteable(Expression.class) - : Literal.TRUE, - in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PER_AGGREGATE_FILTER) + in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0) ? in.readNamedWriteable(Expression.class) : Literal.TRUE, + in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0) ? in.readNamedWriteableCollectionAsList(Expression.class).get(0) : in.readNamedWriteable(Expression.class) ); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Top.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Top.java index e0a7da806b3a..40777b4d78dc 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Top.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Top.java @@ -81,10 +81,8 @@ private Top(StreamInput in) throws IOException { super( Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class), - in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PER_AGGREGATE_FILTER) - ? in.readNamedWriteable(Expression.class) - : Literal.TRUE, - in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PER_AGGREGATE_FILTER) + in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0) ? in.readNamedWriteable(Expression.class) : Literal.TRUE, + in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0) ? in.readNamedWriteableCollectionAsList(Expression.class) : asList(in.readNamedWriteable(Expression.class), in.readNamedWriteable(Expression.class)) ); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/WeightedAvg.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/WeightedAvg.java index dbcc50cea3b9..49c68d002440 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/WeightedAvg.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/WeightedAvg.java @@ -68,10 +68,8 @@ private WeightedAvg(StreamInput in) throws IOException { this( Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class), - in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PER_AGGREGATE_FILTER) - ? in.readNamedWriteable(Expression.class) - : Literal.TRUE, - in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PER_AGGREGATE_FILTER) + in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0) ? 
in.readNamedWriteable(Expression.class) : Literal.TRUE, + in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0) ? in.readNamedWriteableCollectionAsList(Expression.class).get(0) : in.readNamedWriteable(Expression.class) ); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/index/EsIndex.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/index/EsIndex.java index ce52b3a7611b..ee51a6f391a6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/index/EsIndex.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/index/EsIndex.java @@ -50,7 +50,7 @@ public void writeTo(StreamOutput out) throws IOException { @SuppressWarnings("unchecked") private static Map readIndexNameWithModes(StreamInput in) throws IOException { - if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_ADD_INDEX_MODE_CONCRETE_INDICES)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { return in.readMap(IndexMode::readFrom); } else { Set indices = (Set) in.readGenericValue(); @@ -60,7 +60,7 @@ private static Map readIndexNameWithModes(StreamInput in) thr } private static void writeIndexNameWithModes(Map concreteIndices, StreamOutput out) throws IOException { - if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_ADD_INDEX_MODE_CONCRETE_INDICES)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeMap(concreteIndices, (o, v) -> IndexMode.writeTo(v, out)); } else { out.writeGenericValue(concreteIndices.keySet()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java index 47e5b9acfbf9..948fd1c68354 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java @@ -182,8 +182,7 @@ public NameId mapNameId(long l) { @Override @SuppressWarnings("unchecked") public A readAttributeWithCache(CheckedFunction constructor) throws IOException { - if (getTransportVersion().onOrAfter(TransportVersions.ESQL_ATTRIBUTE_CACHED_SERIALIZATION) - || getTransportVersion().isPatchFrom(TransportVersions.V_8_15_2)) { + if (getTransportVersion().onOrAfter(TransportVersions.V_8_15_2)) { // it's safe to cast to int, since the max value for this is {@link PlanStreamOutput#MAX_SERIALIZED_ATTRIBUTES} int cacheId = Math.toIntExact(readZLong()); if (cacheId < 0) { @@ -222,8 +221,7 @@ private void cacheAttribute(int id, Attribute attr) { @SuppressWarnings("unchecked") public A readEsFieldWithCache() throws IOException { - if (getTransportVersion().onOrAfter(TransportVersions.ESQL_ES_FIELD_CACHED_SERIALIZATION) - || getTransportVersion().isPatchFrom(TransportVersions.V_8_15_2)) { + if (getTransportVersion().onOrAfter(TransportVersions.V_8_15_2)) { // it's safe to cast to int, since the max value for this is {@link PlanStreamOutput#MAX_SERIALIZED_ATTRIBUTES} int cacheId = Math.toIntExact(readZLong()); if (cacheId < 0) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java index 615c4266620c..63d95c21d7d9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java @@ -154,8 +154,7 @@ public void writeCachedBlock(Block block) throws IOException { @Override public boolean writeAttributeCacheHeader(Attribute attribute) throws IOException { - if (getTransportVersion().onOrAfter(TransportVersions.ESQL_ATTRIBUTE_CACHED_SERIALIZATION) - || getTransportVersion().isPatchFrom(TransportVersions.V_8_15_2)) { + if (getTransportVersion().onOrAfter(TransportVersions.V_8_15_2)) { Integer cacheId = attributeIdFromCache(attribute); if (cacheId != null) { writeZLong(cacheId); @@ -186,8 +185,7 @@ private int cacheAttribute(Attribute attr) { @Override public boolean writeEsFieldCacheHeader(EsField field) throws IOException { - if (getTransportVersion().onOrAfter(TransportVersions.ESQL_ES_FIELD_CACHED_SERIALIZATION) - || getTransportVersion().isPatchFrom(TransportVersions.V_8_15_2)) { + if (getTransportVersion().onOrAfter(TransportVersions.V_8_15_2)) { Integer cacheId = esFieldIdFromCache(field); if (cacheId != null) { writeZLong(cacheId); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/InsertFieldExtraction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/InsertFieldExtraction.java index dc32a4ad3c28..ed8851b64c27 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/InsertFieldExtraction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/InsertFieldExtraction.java @@ -11,14 +11,12 @@ import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; -import org.elasticsearch.xpack.esql.core.expression.TypedAttribute; import org.elasticsearch.xpack.esql.expression.function.grouping.Categorize; import org.elasticsearch.xpack.esql.optimizer.rules.physical.ProjectAwayColumns; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; import org.elasticsearch.xpack.esql.plan.physical.FieldExtractExec; import org.elasticsearch.xpack.esql.plan.physical.LeafExec; -import org.elasticsearch.xpack.esql.plan.physical.LookupJoinExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.rule.Rule; @@ -102,25 +100,17 @@ private static Set missingAttributes(PhysicalPlan p) { var missing = new LinkedHashSet(); var input = p.inputSet(); - // For LOOKUP JOIN we only need field-extraction on left fields used to match, since the right side is always materialized - if (p instanceof LookupJoinExec join) { - join.leftFields().forEach(f -> { - if (input.contains(f) == false) { - missing.add(f); - } - }); - return missing; - } - - // collect field attributes used inside expressions - // TODO: Rather than going over all expressions manually, this should just call .references() - p.forEachExpression(TypedAttribute.class, f -> { + // Collect field attributes referenced by this plan but not yet present in the child's output. + // This is also correct for LookupJoinExec, where we only need field extraction on the left fields used to match, since the right + // side is always materialized. 
+ p.references().forEach(f -> { if (f instanceof FieldAttribute || f instanceof MetadataAttribute) { if (input.contains(f) == false) { missing.add(f); } } }); + return missing; } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/AggregateExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/AggregateExec.java index dff55f073897..891d03c571b2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/AggregateExec.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/AggregateExec.java @@ -85,7 +85,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeNamedWriteable(child()); out.writeNamedWriteableCollection(groupings()); out.writeNamedWriteableCollection(aggregates()); - if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_AGGREGATE_EXEC_TRACKS_INTERMEDIATE_ATTRS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeEnum(getMode()); out.writeNamedWriteableCollection(intermediateAttributes()); } else { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java index 7bf7d0e2d08e..17468f7afec1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java @@ -39,6 +39,7 @@ import org.elasticsearch.index.mapper.FieldNamesFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NestedLookup; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.mapper.SourceLoader; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; @@ -297,15 +298,11 @@ public SourceLoader newSourceLoader() { @Override public Query toQuery(QueryBuilder queryBuilder) { Query query = ctx.toQuery(queryBuilder).query(); - NestedLookup nestedLookup = ctx.nestedLookup(); - if (nestedLookup != NestedLookup.EMPTY) { - NestedHelper nestedHelper = new NestedHelper(nestedLookup, ctx::isFieldMapped); - if (nestedHelper.mightMatchNestedDocs(query)) { - // filter out nested documents - query = new BooleanQuery.Builder().add(query, BooleanClause.Occur.MUST) - .add(newNonNestedFilter(ctx.indexVersionCreated()), BooleanClause.Occur.FILTER) - .build(); - } + if (ctx.nestedLookup() != NestedLookup.EMPTY && NestedHelper.mightMatchNestedDocs(query, ctx)) { + // filter out nested documents + query = new BooleanQuery.Builder().add(query, BooleanClause.Occur.MUST) + .add(newNonNestedFilter(ctx.indexVersionCreated()), BooleanClause.Occur.FILTER) + .build(); } if (aliasFilter != AliasFilter.EMPTY) { Query filterQuery = ctx.toQuery(aliasFilter.getQueryBuilder()).query(); @@ -348,7 +345,16 @@ public MappedFieldType.FieldExtractPreference fieldExtractPreference() { @Override public SearchLookup lookup() { - return ctx.lookup(); + boolean syntheticSource = SourceFieldMapper.isSynthetic(indexSettings()); + var searchLookup = ctx.lookup(); + if (syntheticSource) { + // in the context of scripts and when synthetic source is used the search lookup can't always be reused between + // users of SearchLookup. 
This is only an issue when scripts fallback to _source, but since we can't always + // accurately determine whether a script uses _source, we should do this for all script usages. + // This lookup() method is only invoked for scripts / runtime fields, so it is ok to do here. + searchLookup = searchLookup.swapSourceProvider(ctx.createSourceProvider()); + } + return searchLookup; } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index 8c0488afdd42..b85340936497 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -565,21 +565,12 @@ private PhysicalOperation planHashJoin(HashJoinExec join, LocalExecutionPlannerC private PhysicalOperation planLookupJoin(LookupJoinExec join, LocalExecutionPlannerContext context) { PhysicalOperation source = plan(join.left(), context); - // TODO: The source builder includes incoming fields including the ones we're going to drop Layout.Builder layoutBuilder = source.layout.builder(); for (Attribute f : join.addedFields()) { layoutBuilder.append(f); } Layout layout = layoutBuilder.build(); - // TODO: this works when the join happens on the coordinator - /* - * But when it happens on the data node we get a - * \_FieldExtractExec[language_code{f}#15, language_name{f}#16]<[]> - * \_EsQueryExec[languages_lookup], indexMode[lookup], query[][_doc{f}#18], limit[], sort[] estimatedRowSize[62] - * Which we'd prefer not to do - at least for now. We already know the fields we're loading - * and don't want any local planning. 
- */ EsQueryExec localSourceExec = (EsQueryExec) join.lookup(); if (localSourceExec.indexMode() != IndexMode.LOOKUP) { throw new IllegalArgumentException("can't plan [" + join + "]"); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeResponse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeResponse.java index 308192704fe0..8d2e092cd414 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeResponse.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeResponse.java @@ -61,7 +61,7 @@ final class ComputeResponse extends TransportResponse { } else { profiles = null; } - if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_CCS_EXECUTION_INFO)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { this.took = in.readOptionalTimeValue(); this.totalShards = in.readVInt(); this.successfulShards = in.readVInt(); @@ -86,7 +86,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeCollection(profiles); } } - if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_CCS_EXECUTION_INFO)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeOptionalTimeValue(took); out.writeVInt(totalShards); out.writeVInt(successfulShards); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index c9c8635a60f5..ed037d24139f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -45,6 +45,7 @@ import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.ShardSearchRequest; +import org.elasticsearch.search.lookup.SourceProvider; import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskCancelledException; @@ -87,6 +88,7 @@ import java.util.concurrent.Executor; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; +import java.util.function.Supplier; import static org.elasticsearch.xpack.esql.plugin.EsqlPlugin.ESQL_WORKER_THREAD_POOL_NAME; @@ -471,12 +473,17 @@ void runCompute(CancellableTask task, ComputeContext context, PhysicalPlan plan, List contexts = new ArrayList<>(context.searchContexts.size()); for (int i = 0; i < context.searchContexts.size(); i++) { SearchContext searchContext = context.searchContexts.get(i); + var searchExecutionContext = new SearchExecutionContext(searchContext.getSearchExecutionContext()) { + + @Override + public SourceProvider createSourceProvider() { + final Supplier supplier = () -> super.createSourceProvider(); + return new ReinitializingSourceProvider(supplier); + + } + }; contexts.add( - new EsPhysicalOperationProviders.DefaultShardContext( - i, - searchContext.getSearchExecutionContext(), - searchContext.request().getAliasFilter() - ) + new EsPhysicalOperationProviders.DefaultShardContext(i, searchExecutionContext, searchContext.request().getAliasFilter()) ); } final List drivers; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequest.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequest.java index 
8f890e63bf54..4c01d326ed7b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequest.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequest.java @@ -81,7 +81,7 @@ final class DataNodeRequest extends TransportRequest implements IndicesRequest.R this.shardIds = in.readCollectionAsList(ShardId::new); this.aliasFilters = in.readMap(Index::new, AliasFilter::readFrom); this.plan = new PlanStreamInput(in, in.namedWriteableRegistry(), configuration).readNamedWriteable(PhysicalPlan.class); - if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_ORIGINAL_INDICES)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { this.indices = in.readStringArray(); this.indicesOptions = IndicesOptions.readIndicesOptions(in); } else { @@ -101,7 +101,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeCollection(shardIds); out.writeMap(aliasFilters); new PlanStreamOutput(out, configuration).writeNamedWriteable(plan); - if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_ORIGINAL_INDICES)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeStringArray(indices); indicesOptions.writeIndicesOptions(out); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ReinitializingSourceProvider.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ReinitializingSourceProvider.java new file mode 100644 index 000000000000..b6b2c6dfec75 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ReinitializingSourceProvider.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.plugin; + +import org.apache.lucene.index.LeafReaderContext; +import org.elasticsearch.search.lookup.Source; +import org.elasticsearch.search.lookup.SourceProvider; + +import java.io.IOException; +import java.util.function.Supplier; + +/** + * This is a workaround for when compute engine executes concurrently with data partitioning by docid. 
+ */ +final class ReinitializingSourceProvider implements SourceProvider { + + private PerThreadSourceProvider perThreadProvider; + private final Supplier sourceProviderFactory; + + ReinitializingSourceProvider(Supplier sourceProviderFactory) { + this.sourceProviderFactory = sourceProviderFactory; + } + + @Override + public Source getSource(LeafReaderContext ctx, int doc) throws IOException { + var currentThread = Thread.currentThread(); + PerThreadSourceProvider provider = perThreadProvider; + if (provider == null || provider.creatingThread != currentThread) { + provider = new PerThreadSourceProvider(sourceProviderFactory.get(), currentThread); + this.perThreadProvider = provider; + } + return perThreadProvider.source.getSource(ctx, doc); + } + + private record PerThreadSourceProvider(SourceProvider source, Thread creatingThread) { + + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/RemoteClusterPlan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/RemoteClusterPlan.java index 031bfd7139a8..aed196f963e9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/RemoteClusterPlan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/RemoteClusterPlan.java @@ -23,7 +23,7 @@ static RemoteClusterPlan from(PlanStreamInput planIn) throws IOException { var plan = planIn.readNamedWriteable(PhysicalPlan.class); var targetIndices = planIn.readStringArray(); final OriginalIndices originalIndices; - if (planIn.getTransportVersion().onOrAfter(TransportVersions.ESQL_ORIGINAL_INDICES)) { + if (planIn.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { originalIndices = OriginalIndices.readOriginalIndices(planIn); } else { // fallback to the previous behavior @@ -35,7 +35,7 @@ static RemoteClusterPlan from(PlanStreamInput planIn) throws IOException { public void writeTo(PlanStreamOutput out) throws IOException { out.writeNamedWriteable(plan); out.writeStringArray(targetIndices); - if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_ORIGINAL_INDICES)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { OriginalIndices.writeOriginalIndices(originalIndices, out); } else { out.writeStringArray(originalIndices.indices()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQuery.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQuery.java index 8d33e9b48059..bc11d246904d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQuery.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQuery.java @@ -107,7 +107,7 @@ public static class Builder extends AbstractQueryBuilder { super(in); this.next = in.readNamedWriteable(QueryBuilder.class); this.field = in.readString(); - if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_SINGLE_VALUE_QUERY_SOURCE)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { if (in instanceof PlanStreamInput psi) { this.source = Source.readFrom(psi); } else { @@ -128,7 +128,7 @@ public static class Builder extends AbstractQueryBuilder { protected void doWriteTo(StreamOutput out) throws IOException { out.writeNamedWriteable(next); out.writeString(field); - if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_SINGLE_VALUE_QUERY_SOURCE)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) 
{ source.writeTo(out); } else if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) { writeOldSource(out, source); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/Configuration.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/Configuration.java index 4ec2746b24ee..997f3265803f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/Configuration.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/Configuration.java @@ -101,7 +101,7 @@ public Configuration(BlockStreamInput in) throws IOException { } else { this.tables = Map.of(); } - if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_CCS_EXECUTION_INFO)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { this.queryStartTimeNanos = in.readLong(); } else { this.queryStartTimeNanos = -1; @@ -127,7 +127,7 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { out.writeMap(tables, (o1, columns) -> o1.writeMap(columns, StreamOutput::writeWriteable)); } - if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_CCS_EXECUTION_INFO)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeLong(queryStartTimeNanos); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java index 71fba5683644..4f7c620bc8d1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java @@ -374,10 +374,11 @@ private void preAnalyzeLookupIndices(List indices, ListenerResult lis // call the EsqlResolveFieldsAction (field-caps) to resolve indices and get field types indexResolver.resolveAsMergedMapping( table.index(), - Set.of("*"), // Current LOOKUP JOIN syntax does not allow for field selection + Set.of("*"), // TODO: for LOOKUP JOIN, this currently declares all lookup index fields relevant and might fetch too many. 
null, listener.map(indexResolution -> listenerResult.withLookupIndexResolution(indexResolution)) ); + // TODO: Verify that the resolved index actually has indexMode: "lookup" } else { try { // No lookup indices specified diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTestUtils.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTestUtils.java index a63ee53cdd49..4e89a09db9ed 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTestUtils.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTestUtils.java @@ -38,7 +38,7 @@ public static Analyzer defaultAnalyzer() { } public static Analyzer expandedDefaultAnalyzer() { - return analyzer(analyzerExpandedDefaultMapping()); + return analyzer(expandedDefaultIndexResolution()); } public static Analyzer analyzer(IndexResolution indexResolution) { @@ -47,18 +47,33 @@ public static Analyzer analyzer(IndexResolution indexResolution) { public static Analyzer analyzer(IndexResolution indexResolution, Verifier verifier) { return new Analyzer( - new AnalyzerContext(EsqlTestUtils.TEST_CFG, new EsqlFunctionRegistry(), indexResolution, defaultEnrichResolution()), + new AnalyzerContext( + EsqlTestUtils.TEST_CFG, + new EsqlFunctionRegistry(), + indexResolution, + defaultLookupResolution(), + defaultEnrichResolution() + ), verifier ); } public static Analyzer analyzer(IndexResolution indexResolution, Verifier verifier, Configuration config) { - return new Analyzer(new AnalyzerContext(config, new EsqlFunctionRegistry(), indexResolution, defaultEnrichResolution()), verifier); + return new Analyzer( + new AnalyzerContext(config, new EsqlFunctionRegistry(), indexResolution, defaultLookupResolution(), defaultEnrichResolution()), + verifier + ); } public static Analyzer analyzer(Verifier verifier) { return new Analyzer( - new AnalyzerContext(EsqlTestUtils.TEST_CFG, new EsqlFunctionRegistry(), analyzerDefaultMapping(), defaultEnrichResolution()), + new AnalyzerContext( + EsqlTestUtils.TEST_CFG, + new EsqlFunctionRegistry(), + analyzerDefaultMapping(), + defaultLookupResolution(), + defaultEnrichResolution() + ), verifier ); } @@ -98,10 +113,14 @@ public static IndexResolution analyzerDefaultMapping() { return loadMapping("mapping-basic.json", "test"); } - public static IndexResolution analyzerExpandedDefaultMapping() { + public static IndexResolution expandedDefaultIndexResolution() { return loadMapping("mapping-default.json", "test"); } + public static IndexResolution defaultLookupResolution() { + return loadMapping("mapping-languages.json", "languages_lookup"); + } + public static EnrichResolution defaultEnrichResolution() { EnrichResolution enrichResolution = new EnrichResolution(); loadEnrichPolicyResolution(enrichResolution, MATCH_TYPE, "languages", "language_code", "languages_idx", "mapping-languages.json"); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index 5a1e109041a1..6edbb55af463 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.test.ESTestCase; +import 
org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.LoadMapping; import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.action.EsqlCapabilities; @@ -73,6 +74,8 @@ import static org.elasticsearch.xpack.esql.analysis.Analyzer.NO_FIELDS; import static org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils.analyze; import static org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils.analyzer; +import static org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils.analyzerDefaultMapping; +import static org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils.defaultEnrichResolution; import static org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils.loadMapping; import static org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils.tsdbIndexResolution; import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; @@ -83,6 +86,7 @@ import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.matchesRegex; +import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.startsWith; //@TestLogging(value = "org.elasticsearch.xpack.esql.analysis:TRACE", reason = "debug") @@ -2002,6 +2006,58 @@ public void testLookupMatchTypeWrong() { assertThat(e.getMessage(), containsString("column type mismatch, table column was [integer] and original column was [keyword]")); } + public void testLookupJoinUnknownIndex() { + assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V4.isEnabled()); + + String errorMessage = "Unknown index [foobar]"; + IndexResolution missingLookupIndex = IndexResolution.invalid(errorMessage); + + Analyzer analyzerMissingLookupIndex = new Analyzer( + new AnalyzerContext( + EsqlTestUtils.TEST_CFG, + new EsqlFunctionRegistry(), + analyzerDefaultMapping(), + missingLookupIndex, + defaultEnrichResolution() + ), + TEST_VERIFIER + ); + + String query = "FROM test | LOOKUP JOIN foobar ON last_name"; + + VerificationException e = expectThrows(VerificationException.class, () -> analyze(query, analyzerMissingLookupIndex)); + assertThat(e.getMessage(), containsString("1:25: " + errorMessage)); + + String query2 = "FROM test | LOOKUP JOIN foobar ON missing_field"; + + e = expectThrows(VerificationException.class, () -> analyze(query2, analyzerMissingLookupIndex)); + assertThat(e.getMessage(), containsString("1:25: " + errorMessage)); + assertThat(e.getMessage(), not(containsString("[missing_field]"))); + } + + public void testLookupJoinUnknownField() { + assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V4.isEnabled()); + + String query = "FROM test | LOOKUP JOIN languages_lookup ON last_name"; + String errorMessage = "1:45: Unknown column [last_name] in right side of join"; + + VerificationException e = expectThrows(VerificationException.class, () -> analyze(query)); + assertThat(e.getMessage(), containsString(errorMessage)); + + String query2 = "FROM test | LOOKUP JOIN languages_lookup ON language_code"; + String errorMessage2 = "1:45: Unknown column [language_code] in left side of join"; + + e = expectThrows(VerificationException.class, () -> analyze(query2)); + assertThat(e.getMessage(), containsString(errorMessage2)); + + String query3 = "FROM test | LOOKUP JOIN languages_lookup ON missing_altogether"; + String errorMessage3 = "1:45: Unknown column [missing_altogether] in "; + + e = expectThrows(VerificationException.class, () -> analyze(query3)); + assertThat(e.getMessage(), 
containsString(errorMessage3 + "left side of join")); + assertThat(e.getMessage(), containsString(errorMessage3 + "right side of join")); + } + public void testImplicitCasting() { var e = expectThrows(VerificationException.class, () -> analyze(""" from test | eval x = concat("2024", "-04", "-01") + 1 day diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index 74e2de114172..882b8b7dbfd7 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -1926,6 +1926,17 @@ public void testSortByAggregate() { assertEquals("1:18: Aggregate functions are not allowed in SORT [COUNT]", error("FROM test | SORT count(*)")); } + public void testLookupJoinDataTypeMismatch() { + assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V4.isEnabled()); + + query("FROM test | EVAL language_code = languages | LOOKUP JOIN languages_lookup ON language_code"); + + assertEquals( + "1:87: JOIN left field [language_code] of type [KEYWORD] is incompatible with right field [language_code] of type [INTEGER]", + error("FROM test | EVAL language_code = languages::keyword | LOOKUP JOIN languages_lookup ON language_code") + ); + } + private void query(String query) { defaultAnalyzer.analyze(parser.createStatement(query)); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ClusterRequestTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ClusterRequestTests.java index 07ca112e8c52..3dfc0f611eb2 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ClusterRequestTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ClusterRequestTests.java @@ -156,11 +156,7 @@ protected ClusterComputeRequest mutateInstance(ClusterComputeRequest in) throws public void testFallbackIndicesOptions() throws Exception { ClusterComputeRequest request = createTestInstance(); - var version = TransportVersionUtils.randomVersionBetween( - random(), - TransportVersions.V_8_14_0, - TransportVersions.ESQL_ORIGINAL_INDICES - ); + var version = TransportVersionUtils.randomVersionBetween(random(), TransportVersions.V_8_14_0, TransportVersions.V_8_16_0); ClusterComputeRequest cloned = copyInstance(request, version); assertThat(cloned.clusterAlias(), equalTo(request.clusterAlias())); assertThat(cloned.sessionId(), equalTo(request.sessionId())); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index 48458bf4f508..3c14e51a3c2d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -68,6 +68,7 @@ import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; import org.elasticsearch.xpack.inference.external.http.sender.RequestExecutorServiceSettings; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; +import org.elasticsearch.xpack.inference.mapper.OffsetSourceFieldMapper; import org.elasticsearch.xpack.inference.mapper.SemanticTextFieldMapper; import 
org.elasticsearch.xpack.inference.queries.SemanticQueryBuilder; import org.elasticsearch.xpack.inference.rank.random.RandomRankBuilder; @@ -392,7 +393,12 @@ public void close() { @Override public Map<String, Mapper.TypeParser> getMappers() { - return Map.of(SemanticTextFieldMapper.CONTENT_TYPE, SemanticTextFieldMapper.PARSER); + return Map.of( + SemanticTextFieldMapper.CONTENT_TYPE, + SemanticTextFieldMapper.PARSER, + OffsetSourceFieldMapper.CONTENT_TYPE, + OffsetSourceFieldMapper.PARSER + ); } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkingSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkingSettings.java index def52e97666f..9d6f5bb89218 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkingSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkingSettings.java @@ -49,7 +49,7 @@ public SentenceBoundaryChunkingSettings(Integer maxChunkSize, @Nullable Integer public SentenceBoundaryChunkingSettings(StreamInput in) throws IOException { maxChunkSize = in.readInt(); - if (in.getTransportVersion().onOrAfter(TransportVersions.CHUNK_SENTENCE_OVERLAP_SETTING_ADDED)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { sentenceOverlap = in.readVInt(); } } @@ -113,13 +113,13 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_CHUNKING_SETTINGS; + return TransportVersions.V_8_16_0; } @Override public void writeTo(StreamOutput out) throws IOException { out.writeInt(maxChunkSize); - if (out.getTransportVersion().onOrAfter(TransportVersions.CHUNK_SENTENCE_OVERLAP_SETTING_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeVInt(sentenceOverlap); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkingSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkingSettings.java index 7fb0fdc91bf7..7e0378d5b0cd 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkingSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkingSettings.java @@ -104,7 +104,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_CHUNKING_SETTINGS; + return TransportVersions.V_8_16_0; } @Override
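The inference hunks above and below all follow one mechanical pattern: feature-named TransportVersions constants (ML_INFERENCE_CHUNKING_SETTINGS, CHUNK_SENTENCE_OVERLAP_SETTING_ADDED, and the rest) are collapsed into the released V_8_16_0 constant now that 8.16 has shipped, leaving the wire format itself untouched. A sketch of the read/write idiom being preserved, using a hypothetical settings record (field names and the fallback default of 1 are illustrative, not from the PR):

```java
import org.elasticsearch.TransportVersions;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;

import java.io.IOException;

// Hypothetical settings class: shows how a field added in 8.16 is only read
// from / written to peers that are new enough to know about it.
record OverlapSettings(int maxChunkSize, int sentenceOverlap) {

    OverlapSettings(StreamInput in) throws IOException {
        // read the base field unconditionally, the 8.16+ field only when the peer sends it
        this(in.readInt(), in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0) ? in.readVInt() : 1);
    }

    void writeTo(StreamOutput out) throws IOException {
        out.writeInt(maxChunkSize);
        if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) {
            out.writeVInt(sentenceOverlap); // omitted for pre-8.16 peers, matching the old payload
        }
    }
}
```

Both sides must gate on the same version constant; an asymmetric check would desynchronize the stream in a mixed-version cluster.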
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/OffsetSourceField.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/OffsetSourceField.java new file mode 100644 index 000000000000..d8339f1004da --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/OffsetSourceField.java @@ -0,0 +1,145 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.mapper; + +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; +import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; +import org.apache.lucene.document.Field; +import org.apache.lucene.document.FieldType; +import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.PostingsEnum; +import org.apache.lucene.index.Term; +import org.apache.lucene.index.Terms; +import org.apache.lucene.search.DocIdSetIterator; + +import java.io.IOException; +import java.nio.charset.Charset; +import java.util.LinkedHashMap; +import java.util.Map; + +/** + * Represents a {@link Field} that stores a {@link Term} along with its start and end offsets. + * Note: The {@link Charset} used to calculate these offsets is not associated with this field. + * It is the responsibility of the consumer to handle the appropriate {@link Charset}. + */ +public final class OffsetSourceField extends Field { + private static final FieldType FIELD_TYPE = new FieldType(); + + static { + FIELD_TYPE.setTokenized(false); + FIELD_TYPE.setOmitNorms(true); + FIELD_TYPE.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS); + } + + private int startOffset; + private int endOffset; + + public OffsetSourceField(String fieldName, String sourceFieldName, int startOffset, int endOffset) { + super(fieldName, sourceFieldName, FIELD_TYPE); + this.startOffset = startOffset; + this.endOffset = endOffset; + } + + public void setValues(String sourceFieldName, int startOffset, int endOffset) { + this.fieldsData = sourceFieldName; + this.startOffset = startOffset; + this.endOffset = endOffset; + } + + @Override + public TokenStream tokenStream(Analyzer analyzer, TokenStream reuse) { + OffsetTokenStream stream; + if (reuse instanceof OffsetTokenStream) { + stream = (OffsetTokenStream) reuse; + } else { + stream = new OffsetTokenStream(); + } + + stream.setValues((String) fieldsData, startOffset, endOffset); + return stream; + } + + public static OffsetSourceLoader loader(Terms terms) throws IOException { + return new OffsetSourceLoader(terms); + } + + private static final class OffsetTokenStream extends TokenStream { + private final CharTermAttribute termAttribute = addAttribute(CharTermAttribute.class); + private final OffsetAttribute offsetAttribute = addAttribute(OffsetAttribute.class); + private boolean used = true; + private String term = null; + private int startOffset = 0; + private int endOffset = 0; + + private OffsetTokenStream() {} + + /** Sets the values */ + void setValues(String term, int startOffset, int endOffset) { + this.term = term; + this.startOffset = startOffset; + this.endOffset = endOffset; + } + + @Override + public boolean incrementToken() { + if (used) { + return false; + } + clearAttributes(); + termAttribute.append(term); + offsetAttribute.setOffset(startOffset, endOffset); + used = true; + return true; + } + + @Override + public void reset() { + used = false; + } + + @Override + public void close() { + term = null; + } + } + + public static class OffsetSourceLoader { + private final Map<String, PostingsEnum> postingsEnums = new LinkedHashMap<>(); + + private OffsetSourceLoader(Terms terms) throws IOException { + var termsEnum = terms.iterator(); + while (termsEnum.next() != null) { + var postings = termsEnum.postings(null, PostingsEnum.OFFSETS); + if (postings.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) { + postingsEnums.put(termsEnum.term().utf8ToString(), postings); + } + } + } + + public OffsetSourceFieldMapper.OffsetSource advanceTo(int doc) throws IOException { + for (var it = postingsEnums.entrySet().iterator(); it.hasNext();) { + var entry = it.next(); + var postings = entry.getValue(); + if (postings.docID() < doc) { + if (postings.advance(doc) == DocIdSetIterator.NO_MORE_DOCS) { + it.remove(); + continue; + } + } + if (postings.docID() == doc) { + assert postings.freq() == 1; + postings.nextPosition(); + return new OffsetSourceFieldMapper.OffsetSource(entry.getKey(), postings.startOffset(), postings.endOffset()); + } + } + return null; + } + } +}
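OffsetSourceField stores exactly one term per document and smuggles the start/end pair through the postings' offset attribute, so no stored field or doc value is needed; OffsetSourceLoader then replays those postings in doc-id order, which is why advanceTo must be called with non-decreasing doc ids. A test-style round-trip sketch (not from the PR; the directory choice and values are illustrative):

```java
import org.apache.lucene.document.Document;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.ByteBuffersDirectory;

public class OffsetRoundTrip {
    public static void main(String[] args) throws Exception {
        try (var dir = new ByteBuffersDirectory();
             var writer = new IndexWriter(dir, new IndexWriterConfig())) {
            var doc = new Document();
            // the term is the name of the source field; the offsets ride along in the postings
            doc.add(new OffsetSourceField("offsets", "body.inference", 10, 250));
            writer.addDocument(doc);
            writer.commit();
            try (var reader = DirectoryReader.open(dir)) {
                var terms = reader.leaves().get(0).reader().terms("offsets");
                var loader = OffsetSourceField.loader(terms);
                // docs must be visited in non-decreasing order
                System.out.println(loader.advanceTo(0)); // OffsetSource[field=body.inference, start=10, end=250]
            }
        }
    }
}
```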
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/OffsetSourceFieldMapper.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/OffsetSourceFieldMapper.java new file mode 100644 index 000000000000..e612076f1aaf --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/OffsetSourceFieldMapper.java @@ -0,0 +1,253 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.mapper; + +import org.apache.lucene.index.FieldInfos; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.Query; +import org.elasticsearch.index.fielddata.FieldDataContext; +import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.index.mapper.DocumentParserContext; +import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MapperBuilderContext; +import org.elasticsearch.index.mapper.TextSearchInfo; +import org.elasticsearch.index.mapper.ValueFetcher; +import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.search.fetch.StoredFieldsSpec; +import org.elasticsearch.search.lookup.Source; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; +import java.io.UncheckedIOException; +import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + +/** + * A {@link FieldMapper} that maps a field name to its start and end offsets. + * The {@link CharsetFormat} used to compute the offsets is specified via the charset parameter. + * Currently, only {@link CharsetFormat#UTF_16} is supported, aligning with Java's {@code String} charset + * for simpler internal usage and integration. + * + * Each document can store at most one value in this field. + * + * Note: This mapper is not yet documented and is intended exclusively for internal use by + * {@link SemanticTextFieldMapper}. If exposing this mapper directly to users becomes necessary, + * extending charset compatibility should be considered, as the current default (and sole supported charset) + * was chosen for ease of Java integration. + */
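The charset note in the javadoc above is practical: offsets are measured in UTF-16 code units, which is exactly how Java String indexing works, so a consumer can slice the original text with String#substring without any transcoding. Byte-oriented offsets (UTF-8, for example) would diverge as soon as the text contains non-ASCII characters, as this small sketch shows (the sample string and indices are illustrative):

```java
import java.nio.charset.StandardCharsets;

public class Utf16Offsets {
    public static void main(String[] args) {
        String source = "chunk one 🚀 chunk two";
        // the emoji is one code point but two UTF-16 code units (indices 10 and 11)
        System.out.println(source.substring(10, 12));                   // prints the emoji
        int utf16Units = source.length();                               // 22
        int utf8Bytes = source.getBytes(StandardCharsets.UTF_8).length; // 24
        System.out.println(utf16Units + " UTF-16 units vs " + utf8Bytes + " UTF-8 bytes");
    }
}
```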
+public class OffsetSourceFieldMapper extends FieldMapper { + public static final String CONTENT_TYPE = "offset_source"; + + private static final String SOURCE_NAME_FIELD = "field"; + private static final String START_OFFSET_FIELD = "start"; + private static final String END_OFFSET_FIELD = "end"; + + public record OffsetSource(String field, int start, int end) implements ToXContentObject { + public OffsetSource { + if (start < 0 || end < 0) { + throw new IllegalArgumentException("Illegal offsets, expected non-negative numbers, got: " + start + ":" + end); + } + if (start > end) { + throw new IllegalArgumentException("Illegal offsets, expected start <= end, got: " + start + " > " + end); + } + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(SOURCE_NAME_FIELD, field); + builder.field(START_OFFSET_FIELD, start); + builder.field(END_OFFSET_FIELD, end); + return builder.endObject(); + } + } + + private static final ConstructingObjectParser<OffsetSource, Void> OFFSET_SOURCE_PARSER = new ConstructingObjectParser<>( + CONTENT_TYPE, + true, + args -> new OffsetSource((String) args[0], (int) args[1], (int) args[2]) + ); + + static { + OFFSET_SOURCE_PARSER.declareString(constructorArg(), new ParseField(SOURCE_NAME_FIELD)); + OFFSET_SOURCE_PARSER.declareInt(constructorArg(), new ParseField(START_OFFSET_FIELD)); + OFFSET_SOURCE_PARSER.declareInt(constructorArg(), new ParseField(END_OFFSET_FIELD)); + }
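OFFSET_SOURCE_PARSER accepts the same three-key object that toXContent above emits, which is also the shape parseCreateField later consumes. Since the parser is private to the mapper, the sketch below declares a local copy of the same ConstructingObjectParser wiring purely to illustrate the accepted input (the field value is made up):

```java
import org.elasticsearch.xcontent.ConstructingObjectParser;
import org.elasticsearch.xcontent.ParseField;
import org.elasticsearch.xcontent.XContentParserConfiguration;
import org.elasticsearch.xcontent.json.JsonXContent;

import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;

public class ParseOffsetSource {
    // local stand-in for the mapper's private parser declaration
    record Offsets(String field, int start, int end) {}

    static final ConstructingObjectParser<Offsets, Void> PARSER = new ConstructingObjectParser<>(
        "offset_source",
        true, // lenient: unknown keys are ignored rather than rejected
        args -> new Offsets((String) args[0], (int) args[1], (int) args[2])
    );

    static {
        PARSER.declareString(constructorArg(), new ParseField("field"));
        PARSER.declareInt(constructorArg(), new ParseField("start"));
        PARSER.declareInt(constructorArg(), new ParseField("end"));
    }

    public static void main(String[] args) throws Exception {
        String json = "{\"field\":\"body.inference\",\"start\":0,\"end\":128}";
        try (var parser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, json)) {
            System.out.println(PARSER.parse(parser, null)); // Offsets[field=body.inference, start=0, end=128]
        }
    }
}
```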
+ + public enum CharsetFormat { + UTF_16(StandardCharsets.UTF_16); + + private final Charset charSet; + + CharsetFormat(Charset charSet) { + this.charSet = charSet; + } + } + + public static class Builder extends FieldMapper.Builder { + private final Parameter<CharsetFormat> charset = Parameter.enumParam( + "charset", + false, + i -> CharsetFormat.UTF_16, + CharsetFormat.UTF_16, + CharsetFormat.class + ); + private final Parameter<Map<String, String>> meta = Parameter.metaParam(); + + public Builder(String name) { + super(name); + } + + @Override + protected Parameter<?>[] getParameters() { + return new Parameter<?>[] { meta, charset }; + } + + @Override + public OffsetSourceFieldMapper build(MapperBuilderContext context) { + return new OffsetSourceFieldMapper( + leafName(), + new OffsetSourceFieldType(context.buildFullName(leafName()), charset.get(), meta.getValue()), + builderParams(this, context) + ); + } + } + + public static final TypeParser PARSER = new TypeParser((n, c) -> new Builder(n)); + + public static final class OffsetSourceFieldType extends MappedFieldType { + private final CharsetFormat charset; + + public OffsetSourceFieldType(String name, CharsetFormat charset, Map<String, String> meta) { + super(name, true, false, false, TextSearchInfo.NONE, meta); + this.charset = charset; + } + + public Charset getCharset() { + return charset.charSet; + } + + @Override + public String typeName() { + return CONTENT_TYPE; + } + + @Override + public boolean fieldHasValue(FieldInfos fieldInfos) { + return fieldInfos.fieldInfo(name()) != null; + } + + @Override + public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { + throw new IllegalArgumentException("[offset_source] fields do not support sorting, scripting or aggregating"); + } + + @Override + public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + return new ValueFetcher() { + OffsetSourceField.OffsetSourceLoader offsetLoader; + + @Override + public void setNextReader(LeafReaderContext context) { + try { + var terms = context.reader().terms(name()); + offsetLoader = terms != null ? OffsetSourceField.loader(terms) : null; + } catch (IOException exc) { + throw new UncheckedIOException(exc); + } + } + + @Override + public List<Object> fetchValues(Source source, int doc, List<Object> ignoredValues) throws IOException { + var offsetSource = offsetLoader != null ? offsetLoader.advanceTo(doc) : null; + return offsetSource != null ? List.of(offsetSource) : null; + } + + @Override + public StoredFieldsSpec storedFieldsSpec() { + return StoredFieldsSpec.NO_REQUIREMENTS; + } + }; + } + + @Override + public Query termQuery(Object value, SearchExecutionContext context) { + throw new IllegalArgumentException("Queries on [offset_source] fields are not supported"); + } + + @Override + public boolean isSearchable() { + return false; + } + } + + /** + * @param simpleName the leaf name of the mapper + * @param mappedFieldType the mapped field type + * @param params initialization params for this field mapper + */ + protected OffsetSourceFieldMapper(String simpleName, MappedFieldType mappedFieldType, BuilderParams params) { + super(simpleName, mappedFieldType, params); + } + + @Override + protected String contentType() { + return CONTENT_TYPE; + } + + @Override + protected boolean supportsParsingObject() { + return true; + } + + @Override + protected void parseCreateField(DocumentParserContext context) throws IOException { + var parser = context.parser(); + if (parser.currentToken() == XContentParser.Token.VALUE_NULL) { + // skip + return; + } + + if (context.doc().getByKey(fullPath()) != null) { + throw new IllegalArgumentException( + "[offset_source] fields do not support indexing multiple values for the same field [" + + fullPath() + + "] in the same document" + ); + } + + // make sure that we don't expand dots in field names while parsing + boolean isWithinLeafObject = context.path().isWithinLeafObject(); + context.path().setWithinLeafObject(true); + try { + var offsetSource = OFFSET_SOURCE_PARSER.parse(parser, null); + context.doc() + .addWithKey( + fieldType().name(), + new OffsetSourceField(fullPath(), offsetSource.field, offsetSource.start, offsetSource.end) + ); + context.addToFieldNames(fieldType().name()); + } finally { + context.path().setWithinLeafObject(isWithinLeafObject); + } + } + + @Override + public FieldMapper.Builder getMergeBuilder() { + return new Builder(leafName()).init(this); + } +}
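With the TypeParser registered through InferencePlugin#getMappers (see the hunk earlier in this diff), the field behaves like any other mapper type at index time. A hypothetical mapping and document pair, shown only to make the parseCreateField contract concrete, since offset_source is internal and undocumented (index and field names are made up):

```java
public class OffsetSourceMappingExample {
    // Text-block style, matching how the REST tests in this PR define mappings.
    static final String MAPPING = """
        { "mappings": { "properties": { "offsets": { "type": "offset_source" } } } }
        """;
    static final String DOCUMENT = """
        { "offsets": { "field": "body.inference", "start": 0, "end": 128 } }
        """;
    // A null value is silently skipped; a second "offsets" value in the same
    // document trips the multiple-values check in parseCreateField above.
}
```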
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/random/RandomRankBuilder.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/random/RandomRankBuilder.java index fdb5503e491e..15d41301d0a3 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/random/RandomRankBuilder.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/random/RandomRankBuilder.java @@ -85,7 +85,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.RANDOM_RERANKER_RETRIEVER; + return TransportVersions.V_8_16_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankDoc.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankDoc.java index d208623e5332..7ad3e8eea053 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankDoc.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankDoc.java @@ -98,6 +98,6 @@ protected void doToXContent(XContentBuilder builder, Params params) throws IOExc @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.TEXT_SIMILARITY_RERANKER_QUERY_REWRITE; + return TransportVersions.V_8_16_0; } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java index c239319b6283..fd2427dc8ac6 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java @@ -129,7 +129,10 @@ public TextSimilarityRankRetrieverBuilder( } @Override - protected TextSimilarityRankRetrieverBuilder clone(List<RetrieverBuilder> newChildRetrievers) { + protected TextSimilarityRankRetrieverBuilder clone( + List<RetrieverBuilder> newChildRetrievers, + List<QueryBuilder> newPreFilterQueryBuilders + ) { return new TextSimilarityRankRetrieverBuilder( newChildRetrievers, inferenceId, @@ -138,7 +141,7 @@ protected TextSimilarityRankRetrieverBuilder clone(List<RetrieverBuilder> newChil rankWindowSize, minScore, retrieverName, - preFilterQueryBuilders + newPreFilterQueryBuilders ); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchService.java index d7ac7caed7ef..5adc2a11b19d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchService.java @@ -359,7 +359,7 @@ public Model updateModelWithEmbeddingDetails(Model model, int embeddingSize) { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_ALIBABACLOUD_SEARCH_ADDED; + return TransportVersions.V_8_16_0; } public static class Configuration { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchServiceSettings.java index 3500bdf814e1..f6ddac34a2b2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchServiceSettings.java @@ -163,7 +163,7 @@ public ToXContentObject getFilteredXContentObject() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_ALIBABACLOUD_SEARCH_ADDED; + return TransportVersions.V_8_16_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/completion/AlibabaCloudSearchCompletionServiceSettings.java
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/completion/AlibabaCloudSearchCompletionServiceSettings.java index 631ec8a8648e..a299cf5b655c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/completion/AlibabaCloudSearchCompletionServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/completion/AlibabaCloudSearchCompletionServiceSettings.java @@ -74,7 +74,7 @@ public ToXContentObject getFilteredXContentObject() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_ALIBABACLOUD_SEARCH_ADDED; + return TransportVersions.V_8_16_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/completion/AlibabaCloudSearchCompletionTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/completion/AlibabaCloudSearchCompletionTaskSettings.java index 05b5873a81d8..7883e7b1d90d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/completion/AlibabaCloudSearchCompletionTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/completion/AlibabaCloudSearchCompletionTaskSettings.java @@ -115,7 +115,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_ALIBABACLOUD_SEARCH_ADDED; + return TransportVersions.V_8_16_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/embeddings/AlibabaCloudSearchEmbeddingsServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/embeddings/AlibabaCloudSearchEmbeddingsServiceSettings.java index 8896e983d3e7..8f40ce2a8b8b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/embeddings/AlibabaCloudSearchEmbeddingsServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/embeddings/AlibabaCloudSearchEmbeddingsServiceSettings.java @@ -135,7 +135,7 @@ public ToXContentObject getFilteredXContentObject() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_ALIBABACLOUD_SEARCH_ADDED; + return TransportVersions.V_8_16_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/embeddings/AlibabaCloudSearchEmbeddingsTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/embeddings/AlibabaCloudSearchEmbeddingsTaskSettings.java index 9a431717d9fb..a08ca6cce66d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/embeddings/AlibabaCloudSearchEmbeddingsTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/embeddings/AlibabaCloudSearchEmbeddingsTaskSettings.java @@ -151,7 +151,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return 
TransportVersions.ML_INFERENCE_ALIBABACLOUD_SEARCH_ADDED; + return TransportVersions.V_8_16_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/rerank/AlibabaCloudSearchRerankServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/rerank/AlibabaCloudSearchRerankServiceSettings.java index 42c7238aefa7..40e645074f61 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/rerank/AlibabaCloudSearchRerankServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/rerank/AlibabaCloudSearchRerankServiceSettings.java @@ -74,7 +74,7 @@ public ToXContentObject getFilteredXContentObject() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_ALIBABACLOUD_SEARCH_ADDED; + return TransportVersions.V_8_16_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/rerank/AlibabaCloudSearchRerankTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/rerank/AlibabaCloudSearchRerankTaskSettings.java index 40c3dee00d6c..2a7806f4beab 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/rerank/AlibabaCloudSearchRerankTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/rerank/AlibabaCloudSearchRerankTaskSettings.java @@ -85,7 +85,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_ALIBABACLOUD_SEARCH_ADDED; + return TransportVersions.V_8_16_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/sparse/AlibabaCloudSearchSparseServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/sparse/AlibabaCloudSearchSparseServiceSettings.java index fe44c936c4e6..0a55d2aba6ce 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/sparse/AlibabaCloudSearchSparseServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/sparse/AlibabaCloudSearchSparseServiceSettings.java @@ -74,7 +74,7 @@ public ToXContentObject getFilteredXContentObject() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_ALIBABACLOUD_SEARCH_ADDED; + return TransportVersions.V_8_16_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/sparse/AlibabaCloudSearchSparseTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/sparse/AlibabaCloudSearchSparseTaskSettings.java index 0f4ebce92016..17c5b178c2a1 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/sparse/AlibabaCloudSearchSparseTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/sparse/AlibabaCloudSearchSparseTaskSettings.java @@ -164,7 +164,7 @@ public 
String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_ALIBABACLOUD_SEARCH_ADDED; + return TransportVersions.V_8_16_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettings.java index a3d2483a068e..78178466f9f3 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettings.java @@ -92,7 +92,7 @@ public CohereRerankServiceSettings(@Nullable String url, @Nullable String modelI public CohereRerankServiceSettings(StreamInput in) throws IOException { this.uri = createOptionalUri(in.readOptionalString()); - if (in.getTransportVersion().before(TransportVersions.ML_INFERENCE_COHERE_UNUSED_RERANK_SETTINGS_REMOVED)) { + if (in.getTransportVersion().before(TransportVersions.V_8_16_0)) { // An older node sends these fields, so we need to skip them to progress through the serialized data in.readOptionalEnum(SimilarityMeasure.class); in.readOptionalVInt(); @@ -162,7 +162,7 @@ public void writeTo(StreamOutput out) throws IOException { var uriToWrite = uri != null ? uri.toString() : null; out.writeOptionalString(uriToWrite); - if (out.getTransportVersion().before(TransportVersions.ML_INFERENCE_COHERE_UNUSED_RERANK_SETTINGS_REMOVED)) { + if (out.getTransportVersion().before(TransportVersions.V_8_16_0)) { // An old node expects this data to be present, so we need to send at least the booleans // indicating that the fields are not set out.writeOptionalEnum(null); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java index 1f08c06edaa9..b256861e7dd2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java @@ -229,7 +229,7 @@ public Model parsePersistedConfig(String inferenceEntityId, TaskType taskType, M @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_EIS_INTEGRATION_ADDED; + return TransportVersions.V_8_16_0; } private ElasticInferenceServiceModel createModelFromPersistent( diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsServiceSettings.java index bbda1bb71679..3af404aeef36 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsServiceSettings.java @@ -113,7 +113,7 @@ public RateLimitSettings rateLimitSettings() { @Override public TransportVersion getMinimalSupportedVersion() { - return 
TransportVersions.ML_INFERENCE_EIS_INTEGRATION_ADDED; + return TransportVersions.V_8_16_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java index 962c939146ef..244108edc3dd 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java @@ -157,19 +157,17 @@ public ElasticsearchInternalServiceSettings(ElasticsearchInternalServiceSettings } public ElasticsearchInternalServiceSettings(StreamInput in) throws IOException { - if (in.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_ADAPTIVE_ALLOCATIONS)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { this.numAllocations = in.readOptionalVInt(); } else { this.numAllocations = in.readVInt(); } this.numThreads = in.readVInt(); this.modelId = in.readString(); - this.adaptiveAllocationsSettings = in.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_ADAPTIVE_ALLOCATIONS) + this.adaptiveAllocationsSettings = in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0) ? in.readOptionalWriteable(AdaptiveAllocationsSettings::new) : null; - this.deploymentId = in.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_ATTACH_TO_EXISTSING_DEPLOYMENT) - ? in.readOptionalString() - : null; + this.deploymentId = in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0) ? in.readOptionalString() : null; } public void setNumAllocations(Integer numAllocations) { @@ -178,17 +176,15 @@ public void setNumAllocations(Integer numAllocations) { @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_ADAPTIVE_ALLOCATIONS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeOptionalVInt(getNumAllocations()); } else { out.writeVInt(getNumAllocations()); } out.writeVInt(getNumThreads()); out.writeString(modelId()); - if (out.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_ADAPTIVE_ALLOCATIONS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeOptionalWriteable(getAdaptiveAllocationsSettings()); - } - if (out.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_ATTACH_TO_EXISTSING_DEPLOYMENT)) { out.writeOptionalString(deploymentId); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java index ea263fb77a2d..981a3e95808e 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java @@ -223,7 +223,7 @@ public Model parsePersistedConfig(String inferenceEntityId, TaskType taskType, M @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_IBM_WATSONX_EMBEDDINGS_ADDED; + return TransportVersions.V_8_16_0; } @Override diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/embeddings/IbmWatsonxEmbeddingsServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/embeddings/IbmWatsonxEmbeddingsServiceSettings.java index 53d5c6c8bb5e..3a9625aef31c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/embeddings/IbmWatsonxEmbeddingsServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/embeddings/IbmWatsonxEmbeddingsServiceSettings.java @@ -207,7 +207,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_IBM_WATSONX_EMBEDDINGS_ADDED; + return TransportVersions.V_8_16_0; } @Override diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/OffsetSourceFieldMapperTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/OffsetSourceFieldMapperTests.java new file mode 100644 index 000000000000..40140d6da5eb --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/OffsetSourceFieldMapperTests.java @@ -0,0 +1,216 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.mapper; + +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; +import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; +import org.apache.lucene.index.IndexableField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.DocumentParsingException; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.MapperTestCase; +import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.index.mapper.SourceToParse; +import org.elasticsearch.index.mapper.ValueFetcher; +import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.search.lookup.Source; +import org.elasticsearch.search.lookup.SourceProvider; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.InferencePlugin; +import org.junit.AssumptionViolatedException; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class OffsetSourceFieldMapperTests extends MapperTestCase { + @Override + protected Collection<? extends Plugin> getPlugins() { + return List.of(new InferencePlugin(Settings.EMPTY)); + } + + @Override + protected void minimalMapping(XContentBuilder b) throws IOException { + b.field("type", "offset_source"); + } + + @Override + protected Object getSampleValueForDocument() { + return
getSampleObjectForDocument(); + } + + @Override + protected Object getSampleObjectForDocument() { + return Map.of("field", "foo", "start", 100, "end", 300); + } + + @Override + protected Object generateRandomInputValue(MappedFieldType ft) { + return new OffsetSourceFieldMapper.OffsetSource("field", randomIntBetween(0, 100), randomIntBetween(101, 1000)); + } + + @Override + protected IngestScriptSupport ingestScriptSupport() { + throw new AssumptionViolatedException("not supported"); + } + + @Override + protected void registerParameters(ParameterChecker checker) throws IOException {} + + @Override + protected void assertSearchable(MappedFieldType fieldType) { + assertFalse(fieldType.isSearchable()); + } + + @Override + protected boolean supportsStoredFields() { + return false; + } + + @Override + protected boolean supportsEmptyInputArray() { + return false; + } + + @Override + protected boolean supportsCopyTo() { + return false; + } + + @Override + protected boolean supportsIgnoreMalformed() { + return false; + } + + @Override + protected SyntheticSourceSupport syntheticSourceSupport(boolean ignoreMalformed) { + return new SyntheticSourceSupport() { + @Override + public SyntheticSourceExample example(int maxValues) { + return new SyntheticSourceExample(getSampleValueForDocument(), getSampleValueForDocument(), null, b -> minimalMapping(b)); + } + + @Override + public List<SyntheticSourceInvalidExample> invalidExample() { + return List.of(); + } + }; + } + + @Override + public void testSyntheticSourceKeepArrays() { + // This mapper doesn't support multiple values (array of objects). + } + + public void testDefaults() throws Exception { + DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping)); + assertEquals(Strings.toString(fieldMapping(this::minimalMapping)), mapper.mappingSource().toString()); + + ParsedDocument doc1 = mapper.parse( + source(b -> b.startObject("field").field("field", "foo").field("start", 0).field("end", 128).endObject()) + ); + List<IndexableField> fields = doc1.rootDoc().getFields("field"); + assertEquals(1, fields.size()); + assertThat(fields.get(0), instanceOf(OffsetSourceField.class)); + OffsetSourceField offsetField1 = (OffsetSourceField) fields.get(0); + + ParsedDocument doc2 = mapper.parse( + source(b -> b.startObject("field").field("field", "bar").field("start", 128).field("end", 512).endObject()) + ); + OffsetSourceField offsetField2 = (OffsetSourceField) doc2.rootDoc().getFields("field").get(0); + + assertTokenStream(offsetField1.tokenStream(null, null), "foo", 0, 128); + assertTokenStream(offsetField2.tokenStream(null, null), "bar", 128, 512); + } + + private void assertTokenStream(TokenStream tk, String expectedTerm, int expectedStartOffset, int expectedEndOffset) throws IOException { + CharTermAttribute termAttribute = tk.addAttribute(CharTermAttribute.class); + OffsetAttribute offsetAttribute = tk.addAttribute(OffsetAttribute.class); + tk.reset(); + assertTrue(tk.incrementToken()); + assertThat(new String(termAttribute.buffer(), 0, termAttribute.length()), equalTo(expectedTerm)); + assertThat(offsetAttribute.startOffset(), equalTo(expectedStartOffset)); + assertThat(offsetAttribute.endOffset(), equalTo(expectedEndOffset)); + assertFalse(tk.incrementToken()); + } + + @Override + protected void assertFetch(MapperService mapperService, String field, Object value, String format) throws IOException { + MappedFieldType ft = mapperService.fieldType(field); + MappedFieldType.FielddataOperation fdt = MappedFieldType.FielddataOperation.SEARCH; + SourceToParse source = source(b ->
b.field(ft.name(), value)); + SearchExecutionContext searchExecutionContext = mock(SearchExecutionContext.class); + when(searchExecutionContext.isSourceEnabled()).thenReturn(true); + when(searchExecutionContext.sourcePath(field)).thenReturn(Set.of(field)); + when(searchExecutionContext.getForField(ft, fdt)).thenAnswer(inv -> fieldDataLookup(mapperService).apply(ft, () -> { + throw new UnsupportedOperationException(); + }, fdt)); + ValueFetcher nativeFetcher = ft.valueFetcher(searchExecutionContext, format); + ParsedDocument doc = mapperService.documentMapper().parse(source); + withLuceneIndex(mapperService, iw -> iw.addDocuments(doc.docs()), ir -> { + Source s = SourceProvider.fromStoredFields().getSource(ir.leaves().get(0), 0); + nativeFetcher.setNextReader(ir.leaves().get(0)); + List<Object> fromNative = nativeFetcher.fetchValues(s, 0, new ArrayList<>()); + assertThat(fromNative.size(), equalTo(1)); + assertThat("fetching " + value, fromNative.get(0), equalTo(value)); + }); + } + + @Override + protected void assertFetchMany(MapperService mapperService, String field, Object value, String format, int count) throws IOException { + assumeFalse("[offset_source] currently doesn't support multiple values in the same field", false); + } + + public void testInvalidCharset() { + var exc = expectThrows(Exception.class, () -> createDocumentMapper(mapping(b -> { + b.startObject("field").field("type", "offset_source").field("charset", "utf_8").endObject(); + }))); + assertThat(exc.getCause().getMessage(), containsString("Unknown value [utf_8] for field [charset]")); + } + + public void testRejectMultiValuedFields() throws IOException { + DocumentMapper mapper = createDocumentMapper(mapping(b -> { b.startObject("field").field("type", "offset_source").endObject(); })); + + DocumentParsingException exc = expectThrows(DocumentParsingException.class, () -> mapper.parse(source(b -> { + b.startArray("field"); + { + b.startObject().field("field", "bar1").field("start", 128).field("end", 512).endObject(); + b.startObject().field("field", "bar2").field("start", 128).field("end", 512).endObject(); + } + b.endArray(); + }))); + assertThat(exc.getCause().getMessage(), containsString("[offset_source] fields do not support indexing multiple values")); + } + + public void testInvalidOffsets() throws IOException { + DocumentMapper mapper = createDocumentMapper(mapping(b -> { b.startObject("field").field("type", "offset_source").endObject(); })); + + DocumentParsingException exc = expectThrows(DocumentParsingException.class, () -> mapper.parse(source(b -> { + b.startArray("field"); + { + b.startObject().field("field", "bar1").field("start", -1).field("end", 512).endObject(); + } + b.endArray(); + }))); + assertThat(exc.getCause().getCause().getCause().getMessage(), containsString("Illegal offsets")); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/OffsetSourceFieldTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/OffsetSourceFieldTests.java new file mode 100644 index 000000000000..4d86263e446f --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/OffsetSourceFieldTests.java @@ -0,0 +1,72 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +package org.elasticsearch.xpack.inference.mapper; + +import org.apache.lucene.document.Document; +import org.apache.lucene.store.Directory; +import org.apache.lucene.tests.index.RandomIndexWriter; +import org.elasticsearch.core.IOUtils; +import org.elasticsearch.test.ESTestCase; + +public class OffsetSourceFieldTests extends ESTestCase { + public void testBasics() throws Exception { + Directory dir = newDirectory(); + RandomIndexWriter writer = new RandomIndexWriter( + random(), + dir, + newIndexWriterConfig().setMergePolicy(newLogMergePolicy(random().nextBoolean())) + ); + Document doc = new Document(); + OffsetSourceField field1 = new OffsetSourceField("field1", "foo", 1, 10); + doc.add(field1); + writer.addDocument(doc); + + field1.setValues("bar", 10, 128); + writer.addDocument(doc); + + writer.addDocument(new Document()); // gap + + field1.setValues("foo", 50, 256); + writer.addDocument(doc); + + writer.addDocument(new Document()); // double gap + writer.addDocument(new Document()); + + field1.setValues("baz", 32, 512); + writer.addDocument(doc); + + writer.forceMerge(1); + var reader = writer.getReader(); + writer.close(); + + var searcher = newSearcher(reader); + var context = searcher.getIndexReader().leaves().get(0); + + var terms = context.reader().terms("field1"); + assertNotNull(terms); + OffsetSourceField.OffsetSourceLoader loader = OffsetSourceField.loader(terms); + + var offset = loader.advanceTo(0); + assertEquals(new OffsetSourceFieldMapper.OffsetSource("foo", 1, 10), offset); + + offset = loader.advanceTo(1); + assertEquals(new OffsetSourceFieldMapper.OffsetSource("bar", 10, 128), offset); + + assertNull(loader.advanceTo(2)); + + offset = loader.advanceTo(3); + assertEquals(new OffsetSourceFieldMapper.OffsetSource("foo", 50, 256), offset); + + offset = loader.advanceTo(6); + assertEquals(new OffsetSourceFieldMapper.OffsetSource("baz", 32, 512), offset); + + assertNull(loader.advanceTo(189)); + + IOUtils.close(reader, dir); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/OffsetSourceFieldTypeTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/OffsetSourceFieldTypeTests.java new file mode 100644 index 000000000000..ccb696515a06 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/OffsetSourceFieldTypeTests.java @@ -0,0 +1,44 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.mapper; + +import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.FieldInfos; +import org.elasticsearch.index.mapper.FieldTypeTestCase; +import org.elasticsearch.index.mapper.MappedFieldType; + +import java.util.Collections; + +public class OffsetSourceFieldTypeTests extends FieldTypeTestCase { + public void testIsNotAggregatable() { + MappedFieldType fieldType = getMappedFieldType(); + assertFalse(fieldType.isAggregatable()); + } + + @Override + public void testFieldHasValue() { + MappedFieldType fieldType = getMappedFieldType(); + FieldInfos fieldInfos = new FieldInfos(new FieldInfo[] { getFieldInfoWithName(fieldType.name()) }); + assertTrue(fieldType.fieldHasValue(fieldInfos)); + } + + @Override + public void testFieldHasValueWithEmptyFieldInfos() { + MappedFieldType fieldType = getMappedFieldType(); + assertFalse(fieldType.fieldHasValue(FieldInfos.EMPTY)); + } + + @Override + public MappedFieldType getMappedFieldType() { + return new OffsetSourceFieldMapper.OffsetSourceFieldType( + "field", + OffsetSourceFieldMapper.CharsetFormat.UTF_16, + Collections.emptyMap() + ); + } +} diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java index 2bf8b00cf551..ef9480681f55 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java @@ -10,6 +10,8 @@ import org.elasticsearch.client.Request; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.time.DateFormatter; +import org.elasticsearch.common.time.FormatNames; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.rest.ESRestTestCase; @@ -17,6 +19,7 @@ import org.junit.ClassRule; import java.io.IOException; +import java.time.Instant; import java.util.List; import java.util.Map; @@ -108,4 +111,118 @@ public void testLogsdbSourceModeForLogsIndex() throws IOException { assertNull(settings.get("index.mapping.source.mode")); } + public void testEsqlRuntimeFields() throws IOException { + String mappings = """ + { + "runtime": { + "message_length": { + "type": "long" + }, + "log.offset": { + "type": "long" + } + }, + "dynamic": false, + "properties": { + "@timestamp": { + "type": "date" + }, + "log" : { + "properties": { + "level": { + "type": "keyword" + }, + "file": { + "type": "keyword" + } + } + } + } + } + """; + String indexName = "test-foo"; + createIndex(indexName, Settings.builder().put("index.mode", "logsdb").build(), mappings); + + int numDocs = 500; + var sb = new StringBuilder(); + var now = Instant.now(); + + var expectedMinTimestamp = now; + for (int i = 0; i < numDocs; i++) { + String level = randomBoolean() ? "info" : randomBoolean() ? "warning" : randomBoolean() ? 
"error" : "fatal"; + String msg = randomAlphaOfLength(20); + String path = randomAlphaOfLength(8); + String messageLength = Integer.toString(msg.length()); + String offset = Integer.toString(randomNonNegativeInt()); + sb.append("{ \"create\": {} }").append('\n'); + if (randomBoolean()) { + sb.append( + """ + {"@timestamp":"$now","message":"$msg","message_length":$l,"file":{"level":"$level","offset":5,"file":"$path"}} + """.replace("$now", formatInstant(now)) + .replace("$level", level) + .replace("$msg", msg) + .replace("$path", path) + .replace("$l", messageLength) + .replace("$o", offset) + ); + } else { + sb.append(""" + {"@timestamp": "$now", "message": "$msg", "message_length": $l} + """.replace("$now", formatInstant(now)).replace("$msg", msg).replace("$l", messageLength)); + } + sb.append('\n'); + if (i != numDocs - 1) { + now = now.plusSeconds(1); + } + } + var expectedMaxTimestamp = now; + + var bulkRequest = new Request("POST", "/" + indexName + "/_bulk"); + bulkRequest.setJsonEntity(sb.toString()); + bulkRequest.addParameter("refresh", "true"); + var bulkResponse = client().performRequest(bulkRequest); + var bulkResponseBody = responseAsMap(bulkResponse); + assertThat(bulkResponseBody, Matchers.hasEntry("errors", false)); + + var forceMergeRequest = new Request("POST", "/" + indexName + "/_forcemerge"); + forceMergeRequest.addParameter("max_num_segments", "1"); + var forceMergeResponse = client().performRequest(forceMergeRequest); + assertOK(forceMergeResponse); + + String query = "FROM test-foo | STATS count(*), min(@timestamp), max(@timestamp), min(message_length), max(message_length)" + + " ,sum(message_length), avg(message_length), min(log.offset), max(log.offset) | LIMIT 1"; + final Request esqlRequest = new Request("POST", "/_query"); + esqlRequest.setJsonEntity(""" + { + "query": "$query" + } + """.replace("$query", query)); + var esqlResponse = client().performRequest(esqlRequest); + assertOK(esqlResponse); + Map esqlResponseBody = responseAsMap(esqlResponse); + + List values = (List) esqlResponseBody.get("values"); + assertThat(values, Matchers.not(Matchers.empty())); + var count = ((List) values.getFirst()).get(0); + assertThat(count, equalTo(numDocs)); + logger.warn("VALUES: {}", values); + + var minTimestamp = ((List) values.getFirst()).get(1); + assertThat(minTimestamp, equalTo(formatInstant(expectedMinTimestamp))); + var maxTimestamp = ((List) values.getFirst()).get(2); + assertThat(maxTimestamp, equalTo(formatInstant(expectedMaxTimestamp))); + + var minLength = ((List) values.getFirst()).get(3); + assertThat(minLength, equalTo(20)); + var maxLength = ((List) values.getFirst()).get(4); + assertThat(maxLength, equalTo(20)); + var sumLength = ((List) values.getFirst()).get(5); + assertThat(sumLength, equalTo(20 * numDocs)); + } + + static String formatInstant(Instant instant) { + return DateFormatter.forPattern(FormatNames.STRICT_DATE_OPTIONAL_TIME.getName()).format(instant); + } + } diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceLicenseService.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceLicenseService.java index 26a672fb1c90..e629f9b3998b 100644 --- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceLicenseService.java +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceLicenseService.java @@ -29,7 +29,7 @@ final class SyntheticSourceLicenseService { // You can only override this property if you received 
explicit approval from Elastic. static final String CUTOFF_DATE_SYS_PROP_NAME = "es.mapping.synthetic_source_fallback_to_stored_source.cutoff_date_restricted_override"; private static final Logger LOGGER = LogManager.getLogger(SyntheticSourceLicenseService.class); - static final long DEFAULT_CUTOFF_DATE = LocalDateTime.of(2025, 2, 1, 0, 0).toInstant(ZoneOffset.UTC).toEpochMilli(); + static final long DEFAULT_CUTOFF_DATE = LocalDateTime.of(2025, 2, 4, 0, 0).toInstant(ZoneOffset.UTC).toEpochMilli(); /** * A setting that determines whether source mode should always be stored source. Regardless of licence. diff --git a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LegacyLicenceIntegrationTests.java b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LegacyLicenceIntegrationTests.java index 890bc464a257..f8f307b572f3 100644 --- a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LegacyLicenceIntegrationTests.java +++ b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LegacyLicenceIntegrationTests.java @@ -69,7 +69,8 @@ public void testSyntheticSourceUsageWithLegacyLicense() { } public void testSyntheticSourceUsageWithLegacyLicensePastCutoff() throws Exception { - long startPastCutoff = LocalDateTime.of(2025, 11, 12, 0, 0).toInstant(ZoneOffset.UTC).toEpochMilli(); + // One day after default cutoff date + long startPastCutoff = LocalDateTime.of(2025, 2, 5, 0, 0).toInstant(ZoneOffset.UTC).toEpochMilli(); putLicense(createGoldOrPlatinumLicense(startPastCutoff)); ensureGreen(); diff --git a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderLegacyLicenseTests.java b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderLegacyLicenseTests.java index eda0d8786874..c871a7d0216e 100644 --- a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderLegacyLicenseTests.java +++ b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderLegacyLicenseTests.java @@ -98,7 +98,7 @@ public void testGetAdditionalIndexSettingsTsdb() throws IOException { } public void testGetAdditionalIndexSettingsTsdbAfterCutoffDate() throws Exception { - long start = LocalDateTime.of(2025, 2, 2, 0, 0).toInstant(ZoneOffset.UTC).toEpochMilli(); + long start = LocalDateTime.of(2025, 2, 5, 0, 0).toInstant(ZoneOffset.UTC).toEpochMilli(); License license = createGoldOrPlatinumLicense(start); long time = LocalDateTime.of(2024, 12, 31, 0, 0).toInstant(ZoneOffset.UTC).toEpochMilli(); var licenseState = new XPackLicenseState(() -> time, new XPackLicenseStatus(license.operationMode(), true, null)); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerBuilder.java index 46edcf1f63c0..b59ef0c40e4f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerBuilder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerBuilder.java @@ -304,7 +304,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.LTR_SERVERLESS_RELEASE; + return TransportVersions.V_8_16_0; } @Override diff --git 
a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/type/DataTypes.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/type/DataTypes.java index 6aa47f7c817a..c67d943b11e2 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/type/DataTypes.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/type/DataTypes.java @@ -112,41 +112,21 @@ public static DataType fromEs(String name) { } public static DataType fromJava(Object value) { - if (value == null) { - return NULL; - } - if (value instanceof Integer) { - return INTEGER; - } - if (value instanceof Long) { - return LONG; - } - if (value instanceof BigInteger) { - return UNSIGNED_LONG; - } - if (value instanceof Boolean) { - return BOOLEAN; - } - if (value instanceof Double) { - return DOUBLE; - } - if (value instanceof Float) { - return FLOAT; - } - if (value instanceof Byte) { - return BYTE; - } - if (value instanceof Short) { - return SHORT; - } - if (value instanceof ZonedDateTime) { - return DATETIME; - } - if (value instanceof String || value instanceof Character) { - return KEYWORD; - } - - return null; + return switch (value) { + case null -> NULL; + case Integer i -> INTEGER; + case Long l -> LONG; + case BigInteger bigInteger -> UNSIGNED_LONG; + case Boolean b -> BOOLEAN; + case Double v -> DOUBLE; + case Float v -> FLOAT; + case Byte b -> BYTE; + case Short s -> SHORT; + case ZonedDateTime zonedDateTime -> DATETIME; + case String s -> KEYWORD; + case Character c -> KEYWORD; + default -> null; + }; } public static boolean isUnsupported(DataType from) { diff --git a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderIT.java b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderIT.java index 37e1807d138a..ae35153b6f39 100644 --- a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderIT.java +++ b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderIT.java @@ -33,6 +33,7 @@ import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.search.vectors.KnnVectorQueryBuilder; import org.elasticsearch.search.vectors.QueryVectorBuilder; +import org.elasticsearch.search.vectors.TestQueryVectorBuilderPlugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; import org.elasticsearch.xcontent.XContentBuilder; @@ -57,7 +58,6 @@ public class RRFRetrieverBuilderIT extends ESIntegTestCase { protected static String INDEX = "test_index"; - protected static final String ID_FIELD = "_id"; protected static final String DOC_FIELD = "doc"; protected static final String TEXT_FIELD = "text"; protected static final String VECTOR_FIELD = "vector"; @@ -743,6 +743,42 @@ public void extractToSearchSourceBuilder(SearchSourceBuilder searchSourceBuilder expectThrows(UnsupportedOperationException.class, () -> client().prepareSearch(INDEX).setSource(source).get()); } + public void testRRFFiltersPropagatedToKnnQueryVectorBuilder() { + final int rankWindowSize = 100; + final int rankConstant = 10; + SearchSourceBuilder source = new SearchSourceBuilder(); + // this will retrieve all docs, but only doc 7 remains due to the top-level filter + StandardRetrieverBuilder standardRetriever = new StandardRetrieverBuilder(QueryBuilders.matchAllQuery()); + // this too will retrieve just doc 7 + KnnRetrieverBuilder knnRetriever = new KnnRetrieverBuilder( + "vector", + null, +
new TestQueryVectorBuilderPlugin.TestQueryVectorBuilder(new float[] { 3 }), + 10, + 10, + null + ); + source.retriever( + new RRFRetrieverBuilder( + Arrays.asList( + new CompoundRetrieverBuilder.RetrieverSource(standardRetriever, null), + new CompoundRetrieverBuilder.RetrieverSource(knnRetriever, null) + ), + rankWindowSize, + rankConstant + ) + ); + source.retriever().getPreFilterQueryBuilders().add(QueryBuilders.boolQuery().must(QueryBuilders.termQuery(DOC_FIELD, "doc_7"))); + source.size(10); + SearchRequestBuilder req = client().prepareSearch(INDEX).setSource(source); + ElasticsearchAssertions.assertResponse(req, resp -> { + assertNull(resp.pointInTimeId()); + assertNotNull(resp.getHits().getTotalHits()); + assertThat(resp.getHits().getTotalHits().value(), equalTo(1L)); + assertThat(resp.getHits().getHits()[0].getId(), equalTo("doc_7")); + }); + } + public void testRewriteOnce() { final float[] vector = new float[] { 1 }; AtomicInteger numAsyncCalls = new AtomicInteger(); diff --git a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFFeatures.java b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFFeatures.java index bbc0f622724a..bb61fa951948 100644 --- a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFFeatures.java +++ b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFFeatures.java @@ -12,6 +12,7 @@ import java.util.Set; +import static org.elasticsearch.search.retriever.CompoundRetrieverBuilder.INNER_RETRIEVERS_FILTER_SUPPORT; import static org.elasticsearch.xpack.rank.rrf.RRFRetrieverBuilder.RRF_RETRIEVER_COMPOSITION_SUPPORTED; /** @@ -23,4 +24,9 @@ public class RRFFeatures implements FeatureSpecification { public Set<NodeFeature> getFeatures() { return Set.of(RRFRetrieverBuilder.RRF_RETRIEVER_SUPPORTED, RRF_RETRIEVER_COMPOSITION_SUPPORTED); } + + @Override + public Set<NodeFeature> getTestFeatures() { + return Set.of(INNER_RETRIEVERS_FILTER_SUPPORT); + } } diff --git a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankDoc.java b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankDoc.java index 4cd10801b298..84961f844216 100644 --- a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankDoc.java +++ b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankDoc.java @@ -62,7 +62,7 @@ public RRFRankDoc(StreamInput in) throws IOException { rank = in.readVInt(); positions = in.readIntArray(); scores = in.readFloatArray(); - if (in.getTransportVersion().onOrAfter(TransportVersions.RRF_QUERY_REWRITE)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { this.rankConstant = in.readVInt(); } else { this.rankConstant = DEFAULT_RANK_CONSTANT; @@ -119,7 +119,7 @@ public void doWriteTo(StreamOutput out) throws IOException { out.writeVInt(rank); out.writeIntArray(positions); out.writeFloatArray(scores); - if (out.getTransportVersion().onOrAfter(TransportVersions.RRF_QUERY_REWRITE)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeVInt(rankConstant); } } @@ -173,6 +173,6 @@ protected void doToXContent(XContentBuilder builder, Params params) throws IOExc @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.RRF_QUERY_REWRITE; + return TransportVersions.V_8_16_0; } } diff --git a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilder.java
b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilder.java index 792ff4eac389..f1171b74f746 100644 --- a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilder.java +++ b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilder.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.util.Maps; import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.search.rank.RankBuilder; import org.elasticsearch.search.rank.RankDoc; @@ -108,8 +109,10 @@ public String getName() { } @Override - protected RRFRetrieverBuilder clone(List<RetrieverSource> newRetrievers) { - return new RRFRetrieverBuilder(newRetrievers, this.rankWindowSize, this.rankConstant); + protected RRFRetrieverBuilder clone(List<RetrieverSource> newRetrievers, List<QueryBuilder> newPreFilterQueryBuilders) { + RRFRetrieverBuilder clone = new RRFRetrieverBuilder(newRetrievers, this.rankWindowSize, this.rankConstant); + clone.preFilterQueryBuilders = newPreFilterQueryBuilders; + return clone; } @Override diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/700_rrf_retriever_search_api_compatibility.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/700_rrf_retriever_search_api_compatibility.yml index 42c01f0b9636..cb30542d8000 100644 --- a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/700_rrf_retriever_search_api_compatibility.yml +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/700_rrf_retriever_search_api_compatibility.yml @@ -1071,3 +1071,77 @@ setup: - match: { hits.hits.2.inner_hits.nested_data_field.hits.total.value: 0 } - match: { hits.hits.2.inner_hits.nested_vector_field.hits.total.value: 0 } + + +--- +"rrf retriever with filters to be passed to nested rrf retrievers": + - requires: + cluster_features: 'inner_retrievers_filter_support' + reason: 'requires fix for properly propagating filters to nested sub-retrievers' + + - do: + search: + _source: false + index: test + body: + retriever: + { + rrf: + { + filter: { + term: { + keyword: "technology" + } + }, + retrievers: [ + { + rrf: { + retrievers: [ + { + # this should only return docs 3 and 5 due to top level filter + standard: { + query: { + knn: { + field: vector, + query_vector: [ 4.0 ], + k: 3 + } + } + } }, + { + # this should return no docs as no docs match both biology and technology + standard: { + query: { + term: { + keyword: "biology" + } + } + } + } + ], + rank_window_size: 10, + rank_constant: 10 + } + }, + # this should only return doc 5 + { + standard: { + query: { + term: { + text: "term5" + } + } + } + } + ], + rank_window_size: 10, + rank_constant: 10 + } + } + size: 10 + + - match: { hits.total.value: 2 } + - match: { hits.hits.0._id: "5" } + - match: { hits.hits.1._id: "3" } + + diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileIntegTests.java index 437fb7635117..3b55295c1efc 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileIntegTests.java @@ -557,8 +557,11 @@ public
void testSuggestProfilesWithHint() throws IOException { equalTo(profileHits4.subList(2, profileHits4.size())) ); + // Exclude profiles whose full name ends with "*", since a trailing wildcard can match _all_ profiles when the rest of the full name is a substring of "user" or the name of + // another profile + final List<Profile> nonWildcardProfiles = profiles.stream().filter(p -> false == p.user().fullName().endsWith("*")).toList(); // A record will not be included if name does not match even when it has matching hint - final Profile hintedProfile5 = randomFrom(profiles); + final Profile hintedProfile5 = randomFrom(nonWildcardProfiles); final List<Profile> profileHits5 = Arrays.stream( doSuggest( Set.of(), diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java index 03558e72fdca..c1be25b27c51 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java @@ -14,6 +14,7 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.DocWriteRequest; @@ -138,7 +139,6 @@ import java.util.function.Supplier; import java.util.stream.Collectors; -import static org.elasticsearch.TransportVersions.ADD_MANAGE_ROLES_PRIVILEGE; import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.search.SearchService.DEFAULT_KEEPALIVE_SETTING; import static org.elasticsearch.transport.RemoteClusterPortSettings.TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY; @@ -430,17 +430,17 @@ private boolean validateRoleDescriptorsForMixedCluster( listener.onFailure( new IllegalArgumentException( "all nodes must have version [" - + ROLE_REMOTE_CLUSTER_PRIVS + + ROLE_REMOTE_CLUSTER_PRIVS.toReleaseVersion() + "] or higher to support remote cluster privileges for API keys" ) ); return false; } - if (transportVersion.before(ADD_MANAGE_ROLES_PRIVILEGE) && hasGlobalManageRolesPrivilege(roleDescriptors)) { + if (transportVersion.before(TransportVersions.V_8_16_0) && hasGlobalManageRolesPrivilege(roleDescriptors)) { listener.onFailure( new IllegalArgumentException( "all nodes must have version [" - + ADD_MANAGE_ROLES_PRIVILEGE + + TransportVersions.V_8_16_0.toReleaseVersion() + "] or higher to support the manage roles privilege for API keys" ) ); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java index 4ae17a679d20..23a1fc188e4a 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java @@ -481,10 +481,10 @@ private Exception validateRoleDescriptor(RoleDescriptor role) { ); } else if (Arrays.stream(role.getConditionalClusterPrivileges()) .anyMatch(privilege -> privilege instanceof ConfigurableClusterPrivileges.ManageRolesPrivilege) - && clusterService.state().getMinTransportVersion().before(TransportVersions.ADD_MANAGE_ROLES_PRIVILEGE)) { + &&
clusterService.state().getMinTransportVersion().before(TransportVersions.V_8_16_0)) { return new IllegalStateException( "all nodes must have version [" - + TransportVersions.ADD_MANAGE_ROLES_PRIVILEGE.toReleaseVersion() + + TransportVersions.V_8_16_0.toReleaseVersion() + "] or higher to support the manage roles privilege" ); } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml index d0d37450472f..3029e00a1b4d 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml @@ -163,4 +163,4 @@ setup: - match: {esql.functions.cos: $functions_cos} - gt: {esql.functions.to_long: $functions_to_long} - match: {esql.functions.coalesce: $functions_coalesce} - - length: {esql.functions: 123} # check the "sister" test above for a likely update to the same esql.functions length check + - length: {esql.functions: 124} # check the "sister" test above for a likely update to the same esql.functions length check diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RolesBackwardsCompatibilityIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RolesBackwardsCompatibilityIT.java index ea1b2cdac5a1..54b7ff6fa484 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RolesBackwardsCompatibilityIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RolesBackwardsCompatibilityIT.java @@ -158,8 +158,8 @@ public void testRolesWithDescription() throws Exception { public void testRolesWithManageRoles() throws Exception { assumeTrue( - "The manage roles privilege is supported after transport version: " + TransportVersions.ADD_MANAGE_ROLES_PRIVILEGE, - minimumTransportVersion().before(TransportVersions.ADD_MANAGE_ROLES_PRIVILEGE) + "The manage roles privilege is supported after transport version: " + TransportVersions.V_8_16_0, + minimumTransportVersion().before(TransportVersions.V_8_16_0) ); switch (CLUSTER_TYPE) { case OLD -> { @@ -190,7 +190,7 @@ public void testRolesWithManageRoles() throws Exception { } case MIXED -> { try { - this.createClientsByVersion(TransportVersions.ADD_MANAGE_ROLES_PRIVILEGE); + this.createClientsByVersion(TransportVersions.V_8_16_0); // succeed when role manage roles is not provided final String initialRole = randomRoleDescriptorSerialized(); createRole(client(), "my-valid-mixed-role", initialRole); @@ -232,7 +232,7 @@ public void testRolesWithManageRoles() throws Exception { e.getMessage(), containsString( "all nodes must have version [" - + TransportVersions.ADD_MANAGE_ROLES_PRIVILEGE.toReleaseVersion() + + TransportVersions.V_8_16_0.toReleaseVersion() + "] or higher to support the manage roles privilege" ) ); @@ -246,7 +246,7 @@ public void testRolesWithManageRoles() throws Exception { e.getMessage(), containsString( "all nodes must have version [" - + TransportVersions.ADD_MANAGE_ROLES_PRIVILEGE.toReleaseVersion() + + TransportVersions.V_8_16_0.toReleaseVersion() + "] or higher to support the manage roles privilege" ) );
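
Notes (illustrative sketches added for review, not part of the patch):

The DataTypes.fromJava rewrite above relies on Java 21 pattern matching for switch (JEP 441): a `case null` arm makes the switch null-tolerant, where a classic switch would throw a NullPointerException on a null selector, and each type pattern replaces one if/instanceof branch. A minimal self-contained example of the same shape:

    // Same shape as the fromJava rewrite: `case null` absorbs the null
    // selector, type patterns bind the matched value, and `default` is the
    // fallthrough for unrecognized types.
    class PatternSwitchSketch {
        static String describe(Object value) {
            return switch (value) {
                case null -> "null";
                case Integer i -> "integer " + i;
                case String s -> "string of length " + s.length();
                default -> "unsupported";
            };
        }
    }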
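
OffsetSourceFieldTests only ever advances the loader through increasing docids, which suggests OffsetSourceLoader is forward-only, like the postings it presumably wraps. A hypothetical per-segment consumer under that assumption; the null-means-no-value contract and the "field1" name are taken from the test:

    import java.io.IOException;
    import org.apache.lucene.index.LeafReader;
    import org.apache.lucene.index.Terms;

    class OffsetSourceReadSketch {
        // Assumption: advanceTo only moves forward and returns null for
        // documents that have no offset value in this segment.
        static void collectOffsets(LeafReader leafReader) throws IOException {
            Terms terms = leafReader.terms("field1");
            if (terms == null) {
                return; // field not indexed in this segment
            }
            OffsetSourceField.OffsetSourceLoader loader = OffsetSourceField.loader(terms);
            for (int docId = 0; docId < leafReader.maxDoc(); docId++) {
                OffsetSourceFieldMapper.OffsetSource offset = loader.advanceTo(docId);
                if (offset != null) {
                    // use the referenced source field and its start/end offsets
                }
            }
        }
    }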
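
Several hunks replace per-feature transport-version constants (ADD_MANAGE_ROLES_PRIVILEGE, RRF_QUERY_REWRITE, LTR_SERVERLESS_RELEASE) with TransportVersions.V_8_16_0, which appears to be the usual consolidation once a feature has shipped in a released version. The ApiKeyService and NativeRolesStore hunks share the same mixed-cluster guard, sketched here using only identifiers visible in the diff:

    import org.elasticsearch.TransportVersion;
    import org.elasticsearch.TransportVersions;

    class MixedClusterGuardSketch {
        // Reject the feature until every node in the cluster is on a
        // transport version that understands the new wire fields;
        // toReleaseVersion() renders a human-readable version for the error.
        static void ensureManageRolesSupported(TransportVersion minTransportVersion) {
            if (minTransportVersion.before(TransportVersions.V_8_16_0)) {
                throw new IllegalStateException(
                    "all nodes must have version ["
                        + TransportVersions.V_8_16_0.toReleaseVersion()
                        + "] or higher to support the manage roles privilege"
                );
            }
        }
    }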