diff --git a/.buildkite/hooks/pre-command b/.buildkite/hooks/pre-command
index b6b730fc3de8b..0c0ede8c3a076 100644
--- a/.buildkite/hooks/pre-command
+++ b/.buildkite/hooks/pre-command
@@ -78,11 +78,15 @@ if [[ "${USE_SNYK_CREDENTIALS:-}" == "true" ]]; then
 fi
 
 if [[ "${USE_PROD_DOCKER_CREDENTIALS:-}" == "true" ]]; then
-  DOCKER_REGISTRY_USERNAME="$(vault read -field=username secret/ci/elastic-elasticsearch/migrated/prod_docker_registry_credentials)"
-  export DOCKER_REGISTRY_USERNAME
+  if which docker > /dev/null 2>&1; then
+    DOCKER_REGISTRY_USERNAME="$(vault read -field=username secret/ci/elastic-elasticsearch/migrated/prod_docker_registry_credentials)"
+    export DOCKER_REGISTRY_USERNAME
 
-  DOCKER_REGISTRY_PASSWORD="$(vault read -field=password secret/ci/elastic-elasticsearch/migrated/prod_docker_registry_credentials)"
-  export DOCKER_REGISTRY_PASSWORD
+    DOCKER_REGISTRY_PASSWORD="$(vault read -field=password secret/ci/elastic-elasticsearch/migrated/prod_docker_registry_credentials)"
+    export DOCKER_REGISTRY_PASSWORD
+
+    docker login --username "$DOCKER_REGISTRY_USERNAME" --password "$DOCKER_REGISTRY_PASSWORD" docker.elastic.co
+  fi
 fi
 
 if [[ "$BUILDKITE_AGENT_META_DATA_PROVIDER" != *"k8s"* ]]; then
diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml
index e7ba4ba7610cd..a6af8bd35c7a0 100644
--- a/.buildkite/pipelines/intake.yml
+++ b/.buildkite/pipelines/intake.yml
@@ -62,7 +62,7 @@ steps:
     timeout_in_minutes: 300
     matrix:
       setup:
-        BWC_VERSION: ["8.15.2", "8.16.0", "9.0.0"]
+        BWC_VERSION: ["8.15.3", "8.16.0", "9.0.0"]
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
diff --git a/.buildkite/pipelines/periodic-packaging.template.yml b/.buildkite/pipelines/periodic-packaging.template.yml
index 64c5fa5060e6c..7d2b4df893266 100644
--- a/.buildkite/pipelines/periodic-packaging.template.yml
+++ b/.buildkite/pipelines/periodic-packaging.template.yml
@@ -8,7 +8,6 @@ steps:
       setup:
         image:
           - centos-7
-          - debian-10
           - debian-11
           - opensuse-leap-15
           - oraclelinux-7
diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml
index 8ef8f5954887e..425a550b9d5e1 100644
--- a/.buildkite/pipelines/periodic-packaging.yml
+++ b/.buildkite/pipelines/periodic-packaging.yml
@@ -9,7 +9,6 @@ steps:
       setup:
         image:
           - centos-7
-          - debian-10
           - debian-11
           - opensuse-leap-15
           - oraclelinux-7
@@ -288,8 +287,8 @@ steps:
     env:
       BWC_VERSION: 8.14.3
 
-  - label: "{{matrix.image}} / 8.15.2 / packaging-tests-upgrade"
-    command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.15.2
+  - label: "{{matrix.image}} / 8.15.3 / packaging-tests-upgrade"
+    command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.15.3
     timeout_in_minutes: 300
     matrix:
      setup:
@@ -303,7 +302,7 @@
       buildDirectory: /dev/shm/bk
       diskSizeGb: 250
     env:
-      BWC_VERSION: 8.15.2
+      BWC_VERSION: 8.15.3
 
  - label: "{{matrix.image}} / 8.16.0 / packaging-tests-upgrade"
    command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.16.0
diff --git a/.buildkite/pipelines/periodic-platform-support.yml b/.buildkite/pipelines/periodic-platform-support.yml
index 867ebe41ed6af..2e6f789f907fa 100644
--- a/.buildkite/pipelines/periodic-platform-support.yml
+++ b/.buildkite/pipelines/periodic-platform-support.yml
@@ -8,7 +8,6 @@ steps:
       setup:
         image:
           - centos-7
-          - debian-10
           - debian-11
           - opensuse-leap-15
           - oraclelinux-7
diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml
index 6afa69f8bf015..fa7e84fae160b 100644
--- a/.buildkite/pipelines/periodic.yml
+++ b/.buildkite/pipelines/periodic.yml
@@ -302,8 +302,8 @@ steps:
           - signal_reason: agent_stop
             limit: 3
 
-  - label: 8.15.2 / bwc
-    command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.15.2#bwcTest
+  - label: 8.15.3 / bwc
+    command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.15.3#bwcTest
     timeout_in_minutes: 300
     agents:
       provider: gcp
@@ -313,7 +313,7 @@
       preemptible: true
       diskSizeGb: 250
     env:
-      BWC_VERSION: 8.15.2
+      BWC_VERSION: 8.15.3
     retry:
       automatic:
         - exit_status: "-1"
@@ -431,7 +431,7 @@ steps:
       setup:
         ES_RUNTIME_JAVA:
           - openjdk17
-        BWC_VERSION: ["8.15.2", "8.16.0", "9.0.0"]
+        BWC_VERSION: ["8.15.3", "8.16.0", "9.0.0"]
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
@@ -475,7 +475,7 @@
         ES_RUNTIME_JAVA:
          - openjdk21
          - openjdk23
-        BWC_VERSION: ["8.15.2", "8.16.0", "9.0.0"]
+        BWC_VERSION: ["8.15.3", "8.16.0", "9.0.0"]
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
@@ -584,7 +584,7 @@
       machineType: n2-standard-8
       buildDirectory: /dev/shm/bk
       diskSizeGb: 250
-    if: build.branch == "main" || build.branch == "8.x" || build.branch == "7.17"
+    if: build.branch == "main" || build.branch == "7.17"
   - label: check-branch-consistency
     command: .ci/scripts/run-gradle.sh branchConsistency
     timeout_in_minutes: 15
diff --git a/.buildkite/pipelines/pull-request/packaging-tests-unix.yml b/.buildkite/pipelines/pull-request/packaging-tests-unix.yml
index d5c937aa4b5a2..963596220442a 100644
--- a/.buildkite/pipelines/pull-request/packaging-tests-unix.yml
+++ b/.buildkite/pipelines/pull-request/packaging-tests-unix.yml
@@ -11,7 +11,6 @@ steps:
       setup:
         image:
           - centos-7
-          - debian-10
           - debian-11
           - opensuse-leap-15
           - oraclelinux-7
@@ -40,7 +39,6 @@ steps:
       setup:
         image:
           - centos-7
-          - debian-10
           - debian-11
           - opensuse-leap-15
           - oraclelinux-7
@@ -69,7 +67,6 @@ steps:
       setup:
         image:
           - centos-7
-          - debian-10
           - debian-11
           - opensuse-leap-15
           - oraclelinux-7
diff --git a/.buildkite/pull-requests.json b/.buildkite/pull-requests.json
index de0212685a8a7..235a4b2dbb4ad 100644
--- a/.buildkite/pull-requests.json
+++ b/.buildkite/pull-requests.json
@@ -14,21 +14,6 @@
       "trigger_comment_regex": "(run\\W+elasticsearch-ci.+)|(^\\s*((buildkite|@elastic(search)?machine)\\s*)?test\\s+this(\\s+please)?)",
       "cancel_intermediate_builds": true,
       "cancel_intermediate_builds_on_comment": false
-    },
-    {
-      "enabled": true,
-      "pipeline_slug": "elasticsearch-pull-request-check-serverless-submodule",
-      "allow_org_users": true,
-      "allowed_repo_permissions": [
-        "admin",
-        "write"
-      ],
-      "set_commit_status": false,
-      "build_on_commit": true,
-      "build_on_comment": false,
-      "labels": [
-        "test-update-serverless"
-      ]
     }
   ]
 }
diff --git a/.ci/bwcVersions b/.ci/bwcVersions
index 498727b3ecd39..de0505c61a251 100644
--- a/.ci/bwcVersions
+++ b/.ci/bwcVersions
@@ -14,6 +14,6 @@ BWC_VERSION:
   - "8.12.2"
   - "8.13.4"
   - "8.14.3"
-  - "8.15.2"
+  - "8.15.3"
   - "8.16.0"
   - "9.0.0"
diff --git a/.ci/scripts/packaging-test.sh b/.ci/scripts/packaging-test.sh
index 6b9938dabffa8..bb7547933b213 100755
--- a/.ci/scripts/packaging-test.sh
+++ b/.ci/scripts/packaging-test.sh
@@ -77,5 +77,6 @@ sudo -E env \
   --unset=ES_JAVA_HOME \
   --unset=JAVA_HOME \
   SYSTEM_JAVA_HOME=`readlink -f -n $BUILD_JAVA_HOME` \
+  DOCKER_CONFIG="${HOME}/.docker" \
   ./gradlew -g $HOME/.gradle --scan --parallel --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ --continue $@
 
diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions
index a2f1e0c675ea5..24f58abc72493 100644
--- a/.ci/snapshotBwcVersions
+++ b/.ci/snapshotBwcVersions
@@ -1,4 +1,4 @@
 BWC_VERSION:
-  - "8.15.2"
+  - "8.15.3"
   - "8.16.0"
   - "9.0.0"
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 930de97a3c213..9480b76da20e6 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -111,9 +111,9 @@ Contributing to the Elasticsearch codebase
 
 **Repository:** [https://github.com/elastic/elasticsearch](https://github.com/elastic/elasticsearch)
 
-JDK 17 is required to build Elasticsearch. You must have a JDK 17 installation
+JDK 21 is required to build Elasticsearch. You must have a JDK 21 installation
 with the environment variable `JAVA_HOME` referencing the path to Java home for
-your JDK 17 installation.
+your JDK 21 installation.
 
 Elasticsearch uses the Gradle wrapper for its build. You can execute Gradle
 using the wrapper via the `gradlew` script on Unix systems or `gradlew.bat`
@@ -152,9 +152,9 @@ The definition of this Elasticsearch cluster can be found [here](build-tools-int
 ### Importing the project into IntelliJ IDEA
 
 The minimum IntelliJ IDEA version required to import the Elasticsearch project is 2020.1.
-Elasticsearch builds using Java 17. When importing into IntelliJ you will need
+Elasticsearch builds using Java 21. When importing into IntelliJ you will need
 to define an appropriate SDK. The convention is that **this SDK should be named
-"17"** so that the project import will detect it automatically. For more details
+"21"** so that the project import will detect it automatically. For more details
 on defining an SDK in IntelliJ please refer to [their documentation](https://www.jetbrains.com/help/idea/sdk.html#define-sdk).
 SDK definitions are global, so you can add the JDK from any project, or after
 project import. Importing with a missing JDK will still work, IntelliJ will
diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/tdigest/SortBench.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/tdigest/SortBench.java
index 423db48337586..4bec6a183fe94 100644
--- a/benchmarks/src/main/java/org/elasticsearch/benchmark/tdigest/SortBench.java
+++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/tdigest/SortBench.java
@@ -21,10 +21,12 @@
 
 package org.elasticsearch.benchmark.tdigest;
 
+import org.elasticsearch.common.breaker.NoopCircuitBreaker;
+import org.elasticsearch.search.aggregations.metrics.MemoryTrackingTDigestArrays;
 import org.elasticsearch.tdigest.Sort;
+import org.elasticsearch.tdigest.arrays.TDigestArrays;
 import org.elasticsearch.tdigest.arrays.TDigestDoubleArray;
 import org.elasticsearch.tdigest.arrays.TDigestIntArray;
-import org.elasticsearch.tdigest.arrays.WrapperTDigestArrays;
 import org.openjdk.jmh.annotations.Benchmark;
 import org.openjdk.jmh.annotations.BenchmarkMode;
 import org.openjdk.jmh.annotations.Fork;
@@ -51,7 +53,8 @@
 @State(Scope.Thread)
 public class SortBench {
     private final int size = 100000;
-    private final TDigestDoubleArray values = WrapperTDigestArrays.INSTANCE.newDoubleArray(size);
+    private final TDigestArrays arrays = new MemoryTrackingTDigestArrays(new NoopCircuitBreaker("default-wrapper-tdigest-arrays"));
+    private final TDigestDoubleArray values = arrays.newDoubleArray(size);
 
     @Param({ "0", "1", "-1" })
     public int sortDirection;
@@ -72,7 +75,7 @@ public void setup() {
 
     @Benchmark
     public void stableSort() {
-        TDigestIntArray order = WrapperTDigestArrays.INSTANCE.newIntArray(size);
+        TDigestIntArray order = arrays.newIntArray(size);
         for (int i = 0; i < size; i++) {
             order.set(i, i);
         }
diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/tdigest/TDigestBench.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/tdigest/TDigestBench.java
index 58bb5b07d22cd..36ffc34c482d7 100644
--- a/benchmarks/src/main/java/org/elasticsearch/benchmark/tdigest/TDigestBench.java
+++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/tdigest/TDigestBench.java
@@ -21,9 +21,11 @@
 
 package org.elasticsearch.benchmark.tdigest;
 
+import org.elasticsearch.common.breaker.NoopCircuitBreaker;
+import org.elasticsearch.search.aggregations.metrics.MemoryTrackingTDigestArrays;
 import org.elasticsearch.tdigest.MergingDigest;
 import org.elasticsearch.tdigest.TDigest;
-import org.elasticsearch.tdigest.arrays.WrapperTDigestArrays;
+import org.elasticsearch.tdigest.arrays.TDigestArrays;
 import org.openjdk.jmh.annotations.Benchmark;
 import org.openjdk.jmh.annotations.BenchmarkMode;
 import org.openjdk.jmh.annotations.Fork;
@@ -56,24 +58,25 @@
 @Threads(1)
 @State(Scope.Thread)
 public class TDigestBench {
+    private static final TDigestArrays arrays = new MemoryTrackingTDigestArrays(new NoopCircuitBreaker("default-wrapper-tdigest-arrays"));
 
     public enum TDigestFactory {
         MERGE {
             @Override
             TDigest create(double compression) {
-                return new MergingDigest(WrapperTDigestArrays.INSTANCE, compression, (int) (10 * compression));
+                return new MergingDigest(arrays, compression, (int) (10 * compression));
             }
         },
         AVL_TREE {
             @Override
             TDigest create(double compression) {
-                return TDigest.createAvlTreeDigest(WrapperTDigestArrays.INSTANCE, compression);
+                return TDigest.createAvlTreeDigest(arrays, compression);
             }
         },
         HYBRID {
             @Override
             TDigest create(double compression) {
-                return TDigest.createHybridDigest(WrapperTDigestArrays.INSTANCE, compression);
+                return TDigest.createHybridDigest(arrays, compression);
             }
         };
 
diff --git a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle
index d4bb0c1189e8d..d3209ff27ce06 100644
--- a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle
+++ b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle
@@ -169,6 +169,7 @@ if (providers.systemProperty('idea.active').getOrNull() == 'true') {
       '-ea',
       '-Djava.security.manager=allow',
       '-Djava.locale.providers=CLDR',
+      '-Dtests.testfeatures.enabled=true',
      '-Des.nativelibs.path="' + testLibraryPath + '"',
      // TODO: only open these for mockito when it is modularized
      '--add-opens=java.base/java.security.cert=ALL-UNNAMED',
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java
index aa9aaa3064024..ed1689cfb0eb9 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java
@@ -16,7 +16,7 @@ public enum DockerBase {
     DEFAULT("ubuntu:20.04", "", "apt-get"),
 
     // "latest" here is intentional, since the image name specifies "8"
-    UBI("docker.elastic.co/ubi8/ubi-minimal:latest", "-ubi8", "microdnf"),
+    UBI("docker.elastic.co/ubi8/ubi-minimal:latest", "-ubi", "microdnf"),
 
     // The Iron Bank base image is UBI (albeit hardened), but we are required to parameterize the Docker build
     IRON_BANK("${BASE_REGISTRY}/${BASE_IMAGE}:${BASE_TAG}", "-ironbank", "yum"),
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java
index 19ab49a851907..3fd59dc7a95f1 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java
@@ -108,6 +108,7 @@ public void execute(Task t) {
                 "-Xmx" + System.getProperty("tests.heap.size", "512m"),
                 "-Xms" + System.getProperty("tests.heap.size", "512m"),
                 "-Djava.security.manager=allow",
+                "-Dtests.testfeatures.enabled=true",
                 "--add-opens=java.base/java.util=ALL-UNNAMED",
                 // TODO: only open these for mockito when it is modularized
                 "--add-opens=java.base/java.security.cert=ALL-UNNAMED",
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerBuildTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerBuildTask.java
index 8971f27838578..9b28401994ee2 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerBuildTask.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerBuildTask.java
@@ -30,6 +30,7 @@
 import org.gradle.api.tasks.PathSensitivity;
 import org.gradle.api.tasks.TaskAction;
 import org.gradle.process.ExecOperations;
+import org.gradle.process.ExecSpec;
 import org.gradle.workers.WorkAction;
 import org.gradle.workers.WorkParameters;
 import org.gradle.workers.WorkerExecutor;
@@ -166,6 +167,7 @@ private void pullBaseImage(String baseImage) {
         for (int attempt = 1; attempt <= maxAttempts; attempt++) {
             try {
                 LoggedExec.exec(execOperations, spec -> {
+                    maybeConfigureDockerConfig(spec);
                     spec.executable("docker");
                     spec.args("pull");
                     spec.args(baseImage);
@@ -181,6 +183,13 @@ private void pullBaseImage(String baseImage) {
         throw new GradleException("Failed to pull Docker base image [" + baseImage + "], all attempts failed");
     }
 
+    private void maybeConfigureDockerConfig(ExecSpec spec) {
+        String dockerConfig = System.getenv("DOCKER_CONFIG");
+        if (dockerConfig != null) {
+            spec.environment("DOCKER_CONFIG", dockerConfig);
+        }
+    }
+
     @Override
     public void execute() {
         final Parameters parameters = getParameters();
@@ -193,6 +202,8 @@ public void execute() {
             final boolean isCrossPlatform = isCrossPlatform();
 
             LoggedExec.exec(execOperations, spec -> {
+                maybeConfigureDockerConfig(spec);
+
                 spec.executable("docker");
 
                 if (isCrossPlatform) {
diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/header/InjectHeaderTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/header/InjectHeaderTests.java
index 3ba9ab0f697e0..0ef7cc7108bce 100644
--- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/header/InjectHeaderTests.java
+++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/header/InjectHeaderTests.java
@@ -26,9 +26,9 @@ public class InjectHeaderTests extends InjectFeatureTests {
 
     private static final Map<String, String> headers = Map.of(
         "Content-Type",
-        "application/vnd.elasticsearch+json;compatible-with=7",
+        "application/vnd.elasticsearch+json;compatible-with=8",
         "Accept",
-        "application/vnd.elasticsearch+json;compatible-with=7"
+        "application/vnd.elasticsearch+json;compatible-with=8"
     );
 
     /**
diff --git a/distribution/docker/src/docker/Dockerfile b/distribution/docker/src/docker/Dockerfile
index 47f79749cbefa..fd2516f2fdc9a 100644
--- a/distribution/docker/src/docker/Dockerfile
+++ b/distribution/docker/src/docker/Dockerfile
@@ -163,9 +163,16 @@ RUN <%= retry.loop(package_manager,
           "  ${package_manager} update && \n" +
           "  ${package_manager} upgrade && \n" +
           "  ${package_manager} add --no-cache \n" +
-          "    bash ca-certificates curl libsystemd netcat-openbsd p11-kit p11-kit-trust shadow tini unzip zip zstd && \n" +
+          "    bash java-cacerts curl libstdc++ libsystemd netcat-openbsd p11-kit p11-kit-trust posix-libc-utils shadow tini unzip zip zstd && \n" +
           "  rm -rf /var/cache/apk/* "
       ) %>
+
+# Set Bash as the default shell for future commands
+SHELL ["/bin/bash", "-c"]
+
+# Optionally set Bash as the default shell in the container at runtime
+CMD ["/bin/bash"]
+
 <% } else if (docker_base == "default" || docker_base == "cloud") { %>
 
 # Change default shell to bash, then install required packages with retries.
@@ -224,7 +231,7 @@ COPY --from=builder --chown=0:0 /opt /opt
 <% } %>
 
 ENV PATH /usr/share/elasticsearch/bin:\$PATH
-
+ENV SHELL /bin/bash
 COPY ${bin_dir}/docker-entrypoint.sh /usr/local/bin/docker-entrypoint.sh
 
 # 1. Sync the user and group permissions of /etc/passwd
@@ -249,6 +256,8 @@ RUN chmod g=u /etc/passwd && \\
 # stays up-to-date with changes to Ubuntu's store)
 COPY bin/docker-openjdk /etc/ca-certificates/update.d/docker-openjdk
 RUN /etc/ca-certificates/update.d/docker-openjdk
+<% } else if (docker_base == 'wolfi') { %>
+RUN ln -sf /etc/ssl/certs/java/cacerts /usr/share/elasticsearch/jdk/lib/security/cacerts
 <% } else { %>
 RUN ln -sf /etc/pki/ca-trust/extracted/java/cacerts /usr/share/elasticsearch/jdk/lib/security/cacerts
 <% } %>
diff --git a/docs/changelog/111519.yaml b/docs/changelog/111519.yaml
deleted file mode 100644
index 8cc62fb8ed903..0000000000000
--- a/docs/changelog/111519.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 111519
-summary: "ESQL: Don't mutate the `BoolQueryBuilder` in plan"
-area: ES|QL
-type: bug
-issues: []
diff --git a/docs/changelog/111535.yaml b/docs/changelog/111535.yaml
deleted file mode 100644
index 4beebbf28d4e1..0000000000000
--- a/docs/changelog/111535.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 111535
-summary: Fix remote cluster credential secure settings reload
-area: Authorization
-type: bug
-issues: []
diff --git a/docs/changelog/111548.yaml b/docs/changelog/111548.yaml
deleted file mode 100644
index ca9e5ae622894..0000000000000
--- a/docs/changelog/111548.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-pr: 111548
-summary: Json parsing exceptions should not cause 500 errors
-area: Infra/Core
-type: bug
-issues:
-  - 111542
diff --git a/docs/changelog/111834.yaml b/docs/changelog/111834.yaml
new file mode 100644
index 0000000000000..4548dee5f91e5
--- /dev/null
+++ b/docs/changelog/111834.yaml
@@ -0,0 +1,5 @@
+pr: 111834
+summary: Add inner hits support to semantic query
+area: Search
+type: enhancement
+issues: []
diff --git a/docs/changelog/111932.yaml b/docs/changelog/111932.yaml
deleted file mode 100644
index ce840ecebcff0..0000000000000
--- a/docs/changelog/111932.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-pr: 111932
-summary: Fix union-types where one index is missing the field
-area: ES|QL
-type: bug
-issues:
-  - 111912
diff --git a/docs/changelog/112092.yaml b/docs/changelog/112092.yaml
new file mode 100644
index 0000000000000..35c731074d760
--- /dev/null
+++ b/docs/changelog/112092.yaml
@@ -0,0 +1,5 @@
+pr: 112092
+summary: "Apply auto-flattening to `subobjects: auto`"
+area: Mapping
+type: enhancement
+issues: []
diff --git a/docs/changelog/112400.yaml b/docs/changelog/112400.yaml
deleted file mode 100644
index 6d622e5fb5248..0000000000000
--- a/docs/changelog/112400.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 112400
-summary: Make sure file accesses in `DnRoleMapper` are done in stack frames with permissions
-area: Infra/Core
-type: bug
-issues: []
diff --git a/docs/changelog/112444.yaml b/docs/changelog/112444.yaml
deleted file mode 100644
index bfa4fd693f0e0..0000000000000
--- a/docs/changelog/112444.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-pr: 112444
-summary: Full coverage of ECS by ecs@mappings when `date_detection` is disabled
-area: Mapping
-type: bug
-issues:
-  - 112398
diff --git a/docs/changelog/112581.yaml b/docs/changelog/112581.yaml
deleted file mode 100644
index 489b4780c06fb..0000000000000
--- a/docs/changelog/112581.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 112581
-summary: Fix missing header in `put_geoip_database` JSON spec
-area: Ingest Node
-type: bug
-issues: []
diff --git a/docs/changelog/112610.yaml b/docs/changelog/112610.yaml
deleted file mode 100644
index 3d67a80a8f0b3..0000000000000
--- a/docs/changelog/112610.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-pr: 112610
-summary: Support widening of numeric types in union-types
-area: ES|QL
-type: bug
-issues:
-  - 111277
diff --git a/docs/changelog/112649.yaml b/docs/changelog/112649.yaml
deleted file mode 100644
index e3cf1e8e34881..0000000000000
--- a/docs/changelog/112649.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 112649
-summary: Allowlist `tracestate` header on remote server port
-area: Security
-type: bug
-issues: []
diff --git a/docs/changelog/112703.yaml b/docs/changelog/112703.yaml
deleted file mode 100644
index a428e8c4e2339..0000000000000
--- a/docs/changelog/112703.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 112703
-summary: JSON parse failures should be 4xx codes
-area: Infra/Core
-type: bug
-issues: []
diff --git a/docs/changelog/112713.yaml b/docs/changelog/112713.yaml
deleted file mode 100644
index 1ccf451b13f82..0000000000000
--- a/docs/changelog/112713.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 112713
-summary: Fix encoding of dynamic arrays in ignored source
-area: Logs
-type: bug
-issues: []
diff --git a/docs/changelog/112720.yaml b/docs/changelog/112720.yaml
deleted file mode 100644
index a44ea5a699520..0000000000000
--- a/docs/changelog/112720.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 112720
-summary: Fix NPE in `dense_vector` stats
-area: Vector Search
-type: bug
-issues: []
diff --git a/docs/changelog/112761.yaml b/docs/changelog/112761.yaml
new file mode 100644
index 0000000000000..fe63f38f365a4
--- /dev/null
+++ b/docs/changelog/112761.yaml
@@ -0,0 +1,6 @@
+pr: 112761
+summary: Fix collapse interaction with stored fields
+area: Search
+type: bug
+issues:
+  - 112646
diff --git a/docs/changelog/112872.yaml b/docs/changelog/112872.yaml
deleted file mode 100644
index 5a6f3af03961d..0000000000000
--- a/docs/changelog/112872.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-pr: 112872
-summary: Fix parsing error in `_terms_enum` API
-area: Search
-type: bug
-issues:
-  - 94378
diff --git a/docs/changelog/113314.yaml b/docs/changelog/113314.yaml
new file mode 100644
index 0000000000000..c496ad3dd86f1
--- /dev/null
+++ b/docs/changelog/113314.yaml
@@ -0,0 +1,6 @@
+pr: 113314
+summary: "[ES|QL] Check expression resolved before checking its data type in `ImplicitCasting`"
+area: ES|QL
+type: bug
+issues:
+  - 113242
diff --git a/docs/changelog/113413.yaml b/docs/changelog/113413.yaml
new file mode 100644
index 0000000000000..8b1104ba61fe4
--- /dev/null
+++ b/docs/changelog/113413.yaml
@@ -0,0 +1,6 @@
+pr: 113413
+summary: Fixed a `NullPointerException` in `_capabilities` API when the `path` parameter is null.
+area: Infra/REST API
+type: bug
+issues:
+  - 113413
diff --git a/docs/changelog/113552.yaml b/docs/changelog/113552.yaml
new file mode 100644
index 0000000000000..48f7da309e82e
--- /dev/null
+++ b/docs/changelog/113552.yaml
@@ -0,0 +1,5 @@
+pr: 113552
+summary: Tag redacted document in ingest metadata
+area: Ingest Node
+type: enhancement
+issues: []
diff --git a/docs/changelog/113570.yaml b/docs/changelog/113570.yaml
new file mode 100644
index 0000000000000..8cfad9195c5cd
--- /dev/null
+++ b/docs/changelog/113570.yaml
@@ -0,0 +1,7 @@
+pr: 113570
+summary: Fix `ignore_above` handling in synthetic source when index level setting
+  is used
+area: Logs
+type: bug
+issues:
+  - 113538
diff --git a/docs/changelog/113699.yaml b/docs/changelog/113699.yaml
new file mode 100644
index 0000000000000..3876c8147e7eb
--- /dev/null
+++ b/docs/changelog/113699.yaml
@@ -0,0 +1,5 @@
+pr: 113699
+summary: "[ESQL] Fix init value in max float aggregation"
+area: ES|QL
+type: bug
+issues: []
diff --git a/docs/reference/cluster/nodes-stats.asciidoc b/docs/reference/cluster/nodes-stats.asciidoc
index 61c58cea95b83..adf8229712ecc 100644
--- a/docs/reference/cluster/nodes-stats.asciidoc
+++ b/docs/reference/cluster/nodes-stats.asciidoc
@@ -1716,6 +1716,10 @@ See <> for more information about disk watermarks a
 
 `io_stats` (Linux only)::
 (objects) Contains I/O statistics for the node.
+
+NOTE: These statistics are derived from the `/proc/diskstats` kernel interface.
+This interface accounts for IO performed by all processes on the system, even
+if you are running {es} within a container.
 
 .Properties of `io_stats`
 [%collapsible%open]
diff --git a/docs/reference/esql/functions/description/to_date_nanos.asciidoc b/docs/reference/esql/functions/description/to_date_nanos.asciidoc
new file mode 100644
index 0000000000000..3fac7295f1bed
--- /dev/null
+++ b/docs/reference/esql/functions/description/to_date_nanos.asciidoc
@@ -0,0 +1,7 @@
+// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.
+
+*Description*
+
+Converts an input to a nanosecond-resolution date value (aka date_nanos).
+
+NOTE: The range for date nanos is 1970-01-01T00:00:00.000000000Z to 2262-04-11T23:47:16.854775807Z. Additionally, integers cannot be converted into date nanos, as the range of integer nanoseconds only covers about 2 seconds after epoch.
diff --git a/docs/reference/esql/functions/kibana/definition/to_date_nanos.json b/docs/reference/esql/functions/kibana/definition/to_date_nanos.json
new file mode 100644
index 0000000000000..bafbcf2bc2038
--- /dev/null
+++ b/docs/reference/esql/functions/kibana/definition/to_date_nanos.json
@@ -0,0 +1,9 @@
+{
+  "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
+  "type" : "eval",
+  "name" : "to_date_nanos",
+  "description" : "Converts an input to a nanosecond-resolution date value (aka date_nanos).",
+  "note" : "The range for date nanos is 1970-01-01T00:00:00.000000000Z to 2262-04-11T23:47:16.854775807Z. Additionally, integers cannot be converted into date nanos, as the range of integer nanoseconds only covers about 2 seconds after epoch.",
+  "signatures" : [ ],
+  "preview" : true
+}
diff --git a/docs/reference/esql/functions/kibana/docs/mv_avg.md b/docs/reference/esql/functions/kibana/docs/mv_avg.md
index c5163f36129bf..c3d7e5423f724 100644
--- a/docs/reference/esql/functions/kibana/docs/mv_avg.md
+++ b/docs/reference/esql/functions/kibana/docs/mv_avg.md
@@ -3,7 +3,7 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ
 -->
 
 ### MV_AVG
-Converts a multivalued field into a single valued field containing the average of all the values.
+Converts a multivalued field into a single valued field containing the average of all of the values.
 
 ```
 ROW a=[3, 5, 1, 6]
diff --git a/docs/reference/esql/functions/kibana/docs/mv_sum.md b/docs/reference/esql/functions/kibana/docs/mv_sum.md
index 987017b34b743..16285d3c7229b 100644
--- a/docs/reference/esql/functions/kibana/docs/mv_sum.md
+++ b/docs/reference/esql/functions/kibana/docs/mv_sum.md
@@ -3,7 +3,7 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ
 -->
 
 ### MV_SUM
-Converts a multivalued field into a single valued field containing the sum of all the values.
+Converts a multivalued field into a single valued field containing the sum of all of the values.
 
 ```
 ROW a=[3, 5, 6]
diff --git a/docs/reference/esql/functions/kibana/docs/to_date_nanos.md b/docs/reference/esql/functions/kibana/docs/to_date_nanos.md
new file mode 100644
index 0000000000000..0294802485ccb
--- /dev/null
+++ b/docs/reference/esql/functions/kibana/docs/to_date_nanos.md
@@ -0,0 +1,8 @@
+<!--
+This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.
+-->
+
+### TO_DATE_NANOS
+Converts an input to a nanosecond-resolution date value (aka date_nanos).
+
+Note: The range for date nanos is 1970-01-01T00:00:00.000000000Z to 2262-04-11T23:47:16.854775807Z. Additionally, integers cannot be converted into date nanos, as the range of integer nanoseconds only covers about 2 seconds after epoch.
diff --git a/docs/reference/esql/functions/kibana/inline_cast.json b/docs/reference/esql/functions/kibana/inline_cast.json
index f1aa283c52e95..81a1966773238 100644
--- a/docs/reference/esql/functions/kibana/inline_cast.json
+++ b/docs/reference/esql/functions/kibana/inline_cast.json
@@ -3,6 +3,7 @@
   "boolean" : "to_boolean",
   "cartesian_point" : "to_cartesianpoint",
   "cartesian_shape" : "to_cartesianshape",
+  "date_nanos" : "to_date_nanos",
   "date_period" : "to_dateperiod",
   "datetime" : "to_datetime",
   "double" : "to_double",
diff --git a/docs/reference/esql/functions/layout/to_date_nanos.asciidoc b/docs/reference/esql/functions/layout/to_date_nanos.asciidoc
new file mode 100644
index 0000000000000..977a0ac969e5d
--- /dev/null
+++ b/docs/reference/esql/functions/layout/to_date_nanos.asciidoc
@@ -0,0 +1,16 @@
+// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.
+
+[discrete]
+[[esql-to_date_nanos]]
+=== `TO_DATE_NANOS`
+
+preview::["Do not use on production environments. This functionality is in technical preview and may be changed or removed in a future release. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features."]
+
+*Syntax*
+
+[.text-center]
+image::esql/functions/signature/to_date_nanos.svg[Embedded,opts=inline]
+
+include::../parameters/to_date_nanos.asciidoc[]
+include::../description/to_date_nanos.asciidoc[]
+include::../types/to_date_nanos.asciidoc[]
diff --git a/docs/reference/esql/functions/parameters/to_date_nanos.asciidoc b/docs/reference/esql/functions/parameters/to_date_nanos.asciidoc
new file mode 100644
index 0000000000000..224f474fa64e3
--- /dev/null
+++ b/docs/reference/esql/functions/parameters/to_date_nanos.asciidoc
@@ -0,0 +1,6 @@
+// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.
+
+*Parameters*
+
+`field`::
+Input value. The input can be a single- or multi-valued column or an expression.
diff --git a/docs/reference/esql/functions/signature/categorize.svg b/docs/reference/esql/functions/signature/categorize.svg
index 3f36f10382840..c52fd1763eea1 100644
--- a/docs/reference/esql/functions/signature/categorize.svg
+++ b/docs/reference/esql/functions/signature/categorize.svg
@@ -1 +1 @@
-CATEGORIZE(field)
+CATEGORIZE(field)
\ No newline at end of file
diff --git a/docs/reference/esql/functions/signature/qstr.svg b/docs/reference/esql/functions/signature/qstr.svg
index 0d3841b071cef..fb6114822ae63 100644
--- a/docs/reference/esql/functions/signature/qstr.svg
+++ b/docs/reference/esql/functions/signature/qstr.svg
@@ -1 +1 @@
-QSTR(query)
+QSTR(query)
\ No newline at end of file
diff --git a/docs/reference/esql/functions/signature/to_date_nanos.svg b/docs/reference/esql/functions/signature/to_date_nanos.svg
new file mode 100644
index 0000000000000..0b24b56429588
--- /dev/null
+++ b/docs/reference/esql/functions/signature/to_date_nanos.svg
@@ -0,0 +1 @@
+TO_DATE_NANOS(field)
\ No newline at end of file
diff --git a/docs/reference/esql/functions/types/to_date_nanos.asciidoc b/docs/reference/esql/functions/types/to_date_nanos.asciidoc
new file mode 100644
index 0000000000000..1f50b65f25a77
--- /dev/null
+++ b/docs/reference/esql/functions/types/to_date_nanos.asciidoc
@@ -0,0 +1,9 @@
+// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.
+
+*Supported types*
+
+[%header.monospaced.styled,format=dsv,separator=|]
+|===
+field | result
+date_nanos
+|===
diff --git a/docs/reference/inference/delete-inference.asciidoc b/docs/reference/inference/delete-inference.asciidoc
index 4df72ba672092..bee39bf9b9851 100644
--- a/docs/reference/inference/delete-inference.asciidoc
+++ b/docs/reference/inference/delete-inference.asciidoc
@@ -49,13 +49,12 @@ The type of {infer} task that the model performs.
 
 `dry_run`::
 (Optional, Boolean)
-When `true`, checks the {infer} processors that reference the endpoint and
-returns them in a list, but does not delete the endpoint. Defaults to `false`.
+When `true`, checks the `semantic_text` fields and {infer} processors that reference the endpoint and returns them in a list, but does not delete the endpoint.
+Defaults to `false`.
 
 `force`::
 (Optional, Boolean)
-Deletes the endpoint regardless if it's used in an {infer} pipeline or in a
-`semantic_text` field.
+Deletes the endpoint regardless of whether it's used in a `semantic_text` field or in an {infer} pipeline.
 
 
 [discrete]
diff --git a/docs/reference/ingest/processors/inference.asciidoc b/docs/reference/ingest/processors/inference.asciidoc
index c942959d34e53..fa4f246cdd7c8 100644
--- a/docs/reference/ingest/processors/inference.asciidoc
+++ b/docs/reference/ingest/processors/inference.asciidoc
@@ -455,6 +455,29 @@ include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenizatio
 =======
 =====
 
+[discrete]
+[[inference-processor-text-similarity-opt]]
+==== Text similarity configuration options
+
+`text_similarity`:::
+(Object, optional)
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-similarity]
++
+.Properties of text_similarity inference
+[%collapsible%open]
+=====
+`span_score_combination_function`::::
+(Optional, string)
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-similarity-span-score-func]
+
+`tokenization`::::
+(Optional, object)
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization]
++
+Refer to <> to review the properties of the
+`tokenization` object.
+=====
+
 [discrete]
 [[inference-processor-zero-shot-opt]]
diff --git a/docs/reference/ingest/processors/redact.asciidoc b/docs/reference/ingest/processors/redact.asciidoc
index 6706106e92655..9b8ac1e15d1a8 100644
--- a/docs/reference/ingest/processors/redact.asciidoc
+++ b/docs/reference/ingest/processors/redact.asciidoc
@@ -39,6 +39,7 @@ patterns. Legacy Grok patterns are not supported.
 | `ignore_missing`     | no        | `true`  | If `true` and `field` does not exist or is `null`, the processor quietly exits without modifying the document
 include::common-options.asciidoc[]
 | `skip_if_unlicensed` | no        | `false` | If `true` and the current license does not support running redact processors, then the processor quietly exits without modifying the document
+| `trace_redact`       | no        | `false` | If `true` then ingest metadata `_ingest._redact._is_redacted` is set to `true` if the document has been redacted
 |======
 
 In this example the predefined `IP` Grok pattern is used to match
diff --git a/docs/reference/mapping/params/subobjects.asciidoc b/docs/reference/mapping/params/subobjects.asciidoc
index b0a5d3817c332..63e8e3c2db3fe 100644
--- a/docs/reference/mapping/params/subobjects.asciidoc
+++ b/docs/reference/mapping/params/subobjects.asciidoc
@@ -10,7 +10,7 @@ where for instance a field `metrics.time` holds a value too, which is common whe
 A document holding a value for both `metrics.time.max` and `metrics.time` gets rejected given that `time`
 would need to be a leaf field to hold a value as well as an object to hold the `max` sub-field.
 
-The `subobjects` setting, which can be applied only to the top-level mapping definition and
+The `subobjects: false` setting, which can be applied only to the top-level mapping definition and
 to <> fields, disables the ability for an object to hold further subobjects and makes it possible
 to store documents where field names contain dots and share common prefixes. From the example above, if the object
 container `metrics` has `subobjects` set to `false`, it can hold values for both `time` and `time.max` directly
@@ -109,26 +109,138 @@ PUT my-index-000001/_doc/metric_1
 <1> The entire mapping is configured to not support objects.
 <2> The document does not support objects
 
+Setting `subobjects: false` disallows the definition of <> and <> sub-fields, which
+can be too restrictive in cases where it's desirable to have <> objects or sub-objects with specific
+behavior (e.g. with `enabled:false`). In this case, it's possible to set `subobjects: auto`, which
+<> whenever possible and falls back to creating an object mapper otherwise (instead of
+rejecting the mapping as `subobjects: false` does). For instance:
+
+[source,console]
+--------------------------------------------------
+PUT my-index-000002
+{
+  "mappings": {
+    "properties": {
+      "metrics": {
+        "type": "object",
+        "subobjects": "auto", <1>
+        "properties": {
+          "inner": {
+            "type": "object",
+            "enabled": false
+          },
+          "nested": {
+            "type": "nested"
+          }
+        }
+      }
+    }
+  }
+}
+
+PUT my-index-000002/_doc/metric_1
+{
+  "metrics.time" : 100, <2>
+  "metrics.time.min" : 10,
+  "metrics.time.max" : 900
+}
+
+PUT my-index-000002/_doc/metric_2
+{
+  "metrics" : { <3>
+    "time" : 100,
+    "time.min" : 10,
+    "time.max" : 900,
+    "inner": {
+      "foo": "bar",
+      "path.to.some.field": "baz"
+    },
+    "nested": [
+      { "id": 10 },
+      { "id": 1 }
+    ]
+  }
+}
+
+GET my-index-000002/_mapping
+--------------------------------------------------
+
+[source,console-result]
+--------------------------------------------------
+{
+  "my-index-000002" : {
+    "mappings" : {
+      "properties" : {
+        "metrics" : {
+          "subobjects" : "auto",
+          "properties" : {
+            "inner": { <4>
+              "type": "object",
+              "enabled": false
+            },
+            "nested": {
+              "type": "nested",
+              "properties" : {
+                "id" : {
+                  "type" : "long"
+                }
+              }
+            },
+            "time" : {
+              "type" : "long"
+            },
+            "time.min" : {
+              "type" : "long"
+            },
+            "time.max" : {
+              "type" : "long"
+            }
+          }
+        }
+      }
+    }
+  }
+}
+--------------------------------------------------
+
+<1> The `metrics` field can only hold statically defined objects, namely `inner` and `nested`.
+<2> Sample document holding flat paths
+<3> Sample document holding an object (configured with sub-objects) and its leaf sub-fields
+<4> The resulting mapping where dots in field names (`time.min`, `time.max`), as well as the
+statically-defined sub-objects `inner` and `nested`, were preserved
+
 The `subobjects` setting for existing fields and the top-level mapping definition cannot be updated.
 
+[[auto-flattening]]
 ==== Auto-flattening object mappings
 
-It is generally recommended to define the properties of an object that is configured with `subobjects: false` with dotted field names
-(as shown in the first example).
-However, it is also possible to define these properties as sub-objects in the mappings.
-In that case, the mapping will be automatically flattened before it is stored.
-This makes it easier to re-use existing mappings without having to re-write them.
+It is generally recommended to define the properties of an object that is configured with `subobjects: false` or
+`subobjects: auto` with dotted field names (as shown in the first example). However, it is also possible to define
+these properties as sub-objects in the mappings. In that case, the mapping will be automatically flattened before
+it is stored. This makes it easier to re-use existing mappings without having to re-write them.
+
+Note that auto-flattening does not apply if any of the following <> are set
+on object mappings that are defined under an object configured with `subobjects: false` or `subobjects: auto`:
 
-Note that auto-flattening will not work when certain <> are set
-on object mappings that are defined under an object configured with `subobjects: false`:
+* The <> mapping parameter is `false`.
+* The <> mapping parameter contradicts the implicit or explicit value of the parent.
+For example, when `dynamic` is set to `false` in the root of the mapping, object mappers that set `dynamic` to `true`
+can't be auto-flattened.
+* The <> mapping parameter is set to `auto` or `true` explicitly.
 
-* The <> mapping parameter must not be `false`.
-* The <> mapping parameter must not contradict the implicit or explicit value of the parent. For example, when `dynamic` is set to `false` in the root of the mapping, object mappers that set `dynamic` to `true` can't be auto-flattened.
-* The <> mapping parameter must not be set to `true` explicitly.
+If such a sub-object is detected, the behavior depends on the `subobjects` value:
+
+* `subobjects: false` is not compatible, so a mapping error is returned during mapping construction.
+* `subobjects: auto` reverts to adding the object to the mapping, bypassing auto-flattening for it. Still, any
+intermediate objects will be auto-flattened if applicable (i.e. the object name gets directly attached under the parent
+object with `subobjects: auto`). Auto-flattening can be applied within sub-objects, if they are configured with
+`subobjects: auto` too.
+
+Auto-flattening example with `subobjects: false`:
 
 [source,console]
 --------------------------------------------------
-PUT my-index-000002
+PUT my-index-000003
 {
   "mappings": {
     "properties": {
@@ -147,13 +259,13 @@ PUT my-index-000002
     }
   }
 }
-GET my-index-000002/_mapping
+GET my-index-000003/_mapping
 --------------------------------------------------
 
 [source,console-result]
 --------------------------------------------------
 {
-  "my-index-000002" : {
+  "my-index-000003" : {
     "mappings" : {
       "properties" : {
         "metrics" : {
@@ -175,5 +287,85 @@ GET my-index-000002/_mapping
 <1> The metrics object can contain further object mappings that will be auto-flattened.
 Object mappings at this level must not set certain mapping parameters as explained above.
-<2> This field will be auto-flattened to `"time.min"` before the mapping is stored.
-<3> The auto-flattened `"time.min"` field can be inspected by looking at the index mapping.
+<2> This field will be auto-flattened to `time.min` before the mapping is stored.
+<3> The auto-flattened `time.min` field can be inspected by looking at the index mapping.
+
+Auto-flattening example with `subobjects: auto`:
+
+[source,console]
+--------------------------------------------------
+PUT my-index-000004
+{
+  "mappings": {
+    "properties": {
+      "metrics": {
+        "subobjects": "auto",
+        "properties": {
+          "time": {
+            "type": "object", <1>
+            "properties": {
+              "min": { "type": "long" } <2>
+            }
+          },
+          "to": {
+            "type": "object",
+            "properties": {
+              "inner_metrics": { <3>
+                "type": "object",
+                "subobjects": "auto",
+                "properties": {
+                  "time": {
+                    "type": "object",
+                    "properties": {
+                      "max": { "type": "long" } <4>
+                    }
+                  }
+                }
+              }
+            }
+          }
+        }
+      }
+    }
+  }
+}
+GET my-index-000004/_mapping
+--------------------------------------------------
+
+[source,console-result]
+--------------------------------------------------
+{
+  "my-index-000004" : {
+    "mappings" : {
+      "properties" : {
+        "metrics" : {
+          "subobjects" : "auto",
+          "properties" : {
+            "time.min" : { <5>
+              "type" : "long"
+            },
+            "to.inner_metrics" : { <6>
+              "subobjects" : "auto",
+              "properties" : {
+                "time.max" : { <7>
+                  "type" : "long"
+                }
+              }
+            }
+          }
+        }
+      }
+    }
+  }
+}
+--------------------------------------------------
+
+<1> The metrics object can contain further object mappings that may be auto-flattened, depending on their mapping
+parameters as explained above.
+<2> This field will be auto-flattened to `time.min` before the mapping is stored.
+<3> This object is configured with `subobjects: auto`, so it can't be auto-flattened itself. Its parent does qualify for auto-flattening,
+so it becomes `to.inner_metrics` before the mapping is stored.
+<4> This field will be auto-flattened to `time.max` before the mapping is stored.
+<5> The auto-flattened `time.min` field can be inspected by looking at the index mapping.
+<6> The inner object `to.inner_metrics` can be inspected by looking at the index mapping.
+<7> The auto-flattened `time.max` field can be inspected by looking at the index mapping.
diff --git a/docs/reference/mapping/types/semantic-text.asciidoc b/docs/reference/mapping/types/semantic-text.asciidoc
index a006f288dc66d..d0fdf0145aa58 100644
--- a/docs/reference/mapping/types/semantic-text.asciidoc
+++ b/docs/reference/mapping/types/semantic-text.asciidoc
@@ -14,9 +14,8 @@ The `semantic_text` field type specifies an inference endpoint identifier that w
 You can create the inference endpoint by using the <>.
 This field type and the <> type make it simpler to perform semantic search on your data.
 
-Using `semantic_text`, you won't need to specify how to generate embeddings for
-your data, or how to index it. The inference endpoint automatically determines
-the embedding generation, indexing, and query to use.
+Using `semantic_text`, you won't need to specify how to generate embeddings for your data, or how to index it.
+The {infer} endpoint automatically determines the embedding generation, indexing, and query to use.
 
 [source,console]
 ------------------------------------------------------------
@@ -32,7 +31,29 @@ PUT my-index-000001
     }
   }
 }
 ------------------------------------------------------------
-// TEST[skip:TBD]
+// TEST[skip:Requires inference endpoint]
+
+
+The recommended way to use `semantic_text` is by having dedicated {infer} endpoints for ingestion and search.
+This ensures that search speed remains unaffected by ingestion workloads, and vice versa.
+After creating dedicated {infer} endpoints for both, you can reference them using the `inference_id` and `search_inference_id` parameters when setting up the index mapping for an index that uses the `semantic_text` field.
+
+[source,console]
+------------------------------------------------------------
+PUT my-index-000002
+{
+  "mappings": {
+    "properties": {
+      "inference_field": {
+        "type": "semantic_text",
+        "inference_id": "my-elser-endpoint-for-ingest",
+        "search_inference_id": "my-elser-endpoint-for-search"
+      }
+    }
+  }
+}
+------------------------------------------------------------
+// TEST[skip:Requires inference endpoint]
 
 
 [discrete]
@@ -41,9 +62,15 @@ PUT my-index-000001
 
 `inference_id`::
 (Required, string)
-Inference endpoint that will be used to generate the embeddings for the field.
+{infer-cap} endpoint that will be used to generate the embeddings for the field.
 Use the <> to create the endpoint.
+If `search_inference_id` is specified, the {infer} endpoint defined by `inference_id` will only be used at index time.
 
+`search_inference_id`::
+(Optional, string)
+{infer-cap} endpoint that will be used to generate embeddings at query time.
+Use the <> to create the endpoint.
+If not specified, the {infer} endpoint defined by `inference_id` will be used at both index and query time.
 
 [discrete]
 [[infer-endpoint-validation]]
@@ -55,6 +82,7 @@ When the first document is indexed, the `inference_id` will be used to generate
 WARNING: Removing an {infer} endpoint will cause ingestion of documents and semantic queries to fail on indices that define `semantic_text` fields with that {infer} endpoint as their `inference_id`.
 Trying to <> that is used on a `semantic_text` field will result in an error.
 
+
 [discrete]
 [[auto-text-chunking]]
 ==== Automatic text chunking
@@ -183,6 +211,7 @@ PUT test-index/_bulk
 
 Notice that both the `semantic_text` field and the source field are updated in the bulk request.
 
+
 [discrete]
 [[limitations]]
 ==== Limitations
diff --git a/docs/reference/query-dsl/semantic-query.asciidoc b/docs/reference/query-dsl/semantic-query.asciidoc
index 22b5e6c5e6aad..f3f6aca3fd07a 100644
--- a/docs/reference/query-dsl/semantic-query.asciidoc
+++ b/docs/reference/query-dsl/semantic-query.asciidoc
@@ -25,7 +25,7 @@ GET my-index-000001/_search
     }
   }
 }
 ------------------------------------------------------------
-// TEST[skip:TBD]
+// TEST[skip: Requires inference endpoints]
 
 
 [discrete]
@@ -40,9 +40,209 @@ The `semantic_text` field to perform the query on.
 (Required, string)
 The query text to be searched for on the field.
 
+`inner_hits`::
+(Optional, object)
+Retrieves the specific passages that match the query.
+See <> for more information.
++
+.Properties of `inner_hits`
+[%collapsible%open]
+====
+`from`::
+(Optional, integer)
+The offset from the first matching passage to fetch.
+Used to paginate through the passages.
+Defaults to `0`.
+
+`size`::
+(Optional, integer)
+The maximum number of matching passages to return.
+Defaults to `3`.
+====
 
 Refer to <> to learn more about semantic search using `semantic_text` and `semantic` query.
 
+[discrete]
+[[semantic-query-passage-ranking]]
+==== Passage ranking with the `semantic` query
+The `inner_hits` parameter can be used for _passage ranking_, which allows you to determine which passages in the document best match the query.
+For example, if you have a document that covers varying topics:
+
+[source,console]
+------------------------------------------------------------
+POST my-index/_doc/lake_tahoe
+{
+  "inference_field": [
+    "Lake Tahoe is the largest alpine lake in North America",
+    "When hiking in the area, please be on alert for bears"
+  ]
+}
+------------------------------------------------------------
+// TEST[skip: Requires inference endpoints]
+
+You can use passage ranking to find the passage that best matches your query:
+
+[source,console]
+------------------------------------------------------------
+GET my-index/_search
+{
+  "query": {
+    "semantic": {
+      "field": "inference_field",
+      "query": "mountain lake",
+      "inner_hits": { }
+    }
+  }
+}
+------------------------------------------------------------
+// TEST[skip: Requires inference endpoints]
+
+[source,console-result]
+------------------------------------------------------------
+{
+  "took": 67,
+  "timed_out": false,
+  "_shards": {
+    "total": 1,
+    "successful": 1,
+    "skipped": 0,
+    "failed": 0
+  },
+  "hits": {
+    "total": {
+      "value": 1,
+      "relation": "eq"
+    },
+    "max_score": 10.844536,
+    "hits": [
+      {
+        "_index": "my-index",
+        "_id": "lake_tahoe",
+        "_score": 10.844536,
+        "_source": {
+          ...
+        },
+        "inner_hits": { <1>
+          "inference_field": {
+            "hits": {
+              "total": {
+                "value": 2,
+                "relation": "eq"
+              },
+              "max_score": 10.844536,
+              "hits": [
+                {
+                  "_index": "my-index",
+                  "_id": "lake_tahoe",
+                  "_nested": {
+                    "field": "inference_field.inference.chunks",
+                    "offset": 0
+                  },
+                  "_score": 10.844536,
+                  "_source": {
+                    "text": "Lake Tahoe is the largest alpine lake in North America"
+                  }
+                },
+                {
+                  "_index": "my-index",
+                  "_id": "lake_tahoe",
+                  "_nested": {
+                    "field": "inference_field.inference.chunks",
+                    "offset": 1
+                  },
+                  "_score": 3.2726858,
+                  "_source": {
+                    "text": "When hiking in the area, please be on alert for bears"
+                  }
+                }
+              ]
+            }
+          }
+        }
+      }
+    ]
+  }
+}
+------------------------------------------------------------
+<1> Ranked passages will be returned using the <>, with `` set to the `semantic_text` field name.
+
+By default, the top three matching passages will be returned.
+You can use the `size` parameter to control the number of passages returned and the `from` parameter to page through the matching passages:
+
+[source,console]
+------------------------------------------------------------
+GET my-index/_search
+{
+  "query": {
+    "semantic": {
+      "field": "inference_field",
+      "query": "mountain lake",
+      "inner_hits": {
+        "from": 1,
+        "size": 1
+      }
+    }
+  }
+}
+------------------------------------------------------------
+// TEST[skip: Requires inference endpoints]
+
+[source,console-result]
+------------------------------------------------------------
+{
+  "took": 42,
+  "timed_out": false,
+  "_shards": {
+    "total": 1,
+    "successful": 1,
+    "skipped": 0,
+    "failed": 0
+  },
+  "hits": {
+    "total": {
+      "value": 1,
+      "relation": "eq"
+    },
+    "max_score": 10.844536,
+    "hits": [
+      {
+        "_index": "my-index",
+        "_id": "lake_tahoe",
+        "_score": 10.844536,
+        "_source": {
+          ...
+        },
+        "inner_hits": {
+          "inference_field": {
+            "hits": {
+              "total": {
+                "value": 2,
+                "relation": "eq"
+              },
+              "max_score": 10.844536,
+              "hits": [
+                {
+                  "_index": "my-index",
+                  "_id": "lake_tahoe",
+                  "_nested": {
+                    "field": "inference_field.inference.chunks",
+                    "offset": 1
+                  },
+                  "_score": 3.2726858,
+                  "_source": {
+                    "text": "When hiking in the area, please be on alert for bears"
+                  }
+                }
+              ]
+            }
+          }
+        }
+      }
+    ]
+  }
+}
+------------------------------------------------------------
+
 [discrete]
 [[hybrid-search-semantic]]
 ==== Hybrid search with the `semantic` query
@@ -79,7 +279,7 @@ POST my-index/_search
     }
   }
 }
 ------------------------------------------------------------
-// TEST[skip:TBD]
+// TEST[skip: Requires inference endpoints]
 
 You can also use semantic_text as part of <> to make ranking relevant results easier:
@@ -116,12 +316,12 @@ GET my-index/_search
     }
   }
 }
 ------------------------------------------------------------
-// TEST[skip:TBD]
+// TEST[skip: Requires inference endpoints]
 
 [discrete]
 [[advanced-search]]
-=== Advanced search on `semantic_text` fields
+==== Advanced search on `semantic_text` fields
 
 The `semantic` query uses default settings for searching on `semantic_text` fields for ease of use.
 If you want to fine-tune a search on a `semantic_text` field, you need to know the task type used by the `inference_id` configured in `semantic_text`.
@@ -135,7 +335,7 @@ on a `semantic_text` field, it is not supported to use the `semantic_query` on a
 
 [discrete]
 [[search-sparse-inference]]
-==== Search with `sparse_embedding` inference
+===== Search with `sparse_embedding` inference
 
 When the {infer} endpoint uses a `sparse_embedding` model, you can use a <> on a
 <> field in the following way:
@@ -157,14 +357,14 @@ GET test-index/_search
     }
   }
 }
 ------------------------------------------------------------
-// TEST[skip:TBD]
+// TEST[skip: Requires inference endpoints]
 
 You can customize the `sparse_vector` query to include specific settings, like <>.
 
 [discrete]
 [[search-text-inferece]]
-==== Search with `text_embedding` inference
+===== Search with `text_embedding` inference
 
 When the {infer} endpoint uses a `text_embedding` model, you can use a <> on a `semantic_text` field in the following way:
@@ -190,6 +390,6 @@ GET test-index/_search
     }
   }
 }
 ------------------------------------------------------------
-// TEST[skip:TBD]
+// TEST[skip: Requires inference endpoints]
 
 You can customize the `knn` query to include specific settings, like `num_candidates` and `k`.
diff --git a/docs/reference/snapshot-restore/repository-s3.asciidoc b/docs/reference/snapshot-restore/repository-s3.asciidoc
index 3a9c12caebad9..a75a1a3ce1042 100644
--- a/docs/reference/snapshot-restore/repository-s3.asciidoc
+++ b/docs/reference/snapshot-restore/repository-s3.asciidoc
@@ -378,7 +378,7 @@ If you use a Glacier storage class, or another unsupported storage class, or
 object expiry, then you may permanently lose access to your repository
 contents.
 
-You may use the `intellligent_tiering` storage class to automatically manage
+You may use the `intelligent_tiering` storage class to automatically manage
 the class of objects, but you must not enable the optional Archive Access or
 Deep Archive Access tiers. If you use these tiers then you may permanently
 lose access to your repository contents.
diff --git a/docs/reference/troubleshooting/common-issues/high-jvm-memory-pressure.asciidoc b/docs/reference/troubleshooting/common-issues/high-jvm-memory-pressure.asciidoc
index 267d6594b8025..3469a0ca5bf42 100644
--- a/docs/reference/troubleshooting/common-issues/high-jvm-memory-pressure.asciidoc
+++ b/docs/reference/troubleshooting/common-issues/high-jvm-memory-pressure.asciidoc
@@ -66,6 +66,8 @@ searches, consider the following setting changes:
 <> cluster setting.
 
+* Set a default search timeout using the <> cluster setting.
+ [source,console] ---- PUT _settings diff --git a/libs/core/src/main/java/org/elasticsearch/core/RestApiVersion.java b/libs/core/src/main/java/org/elasticsearch/core/RestApiVersion.java index 9ff31a191ce37..387d05db84441 100644 --- a/libs/core/src/main/java/org/elasticsearch/core/RestApiVersion.java +++ b/libs/core/src/main/java/org/elasticsearch/core/RestApiVersion.java @@ -22,14 +22,13 @@ public enum RestApiVersion { V_8(8), + @UpdateForV9 // remove all references to V_7 then delete this annotation V_7(7); public final byte major; - @UpdateForV9 - // We need to bump current and previous to V_9 and V_8, respectively - private static final RestApiVersion CURRENT = V_8; - private static final RestApiVersion PREVIOUS = V_7; + private static final RestApiVersion CURRENT = V_9; + private static final RestApiVersion PREVIOUS = V_8; RestApiVersion(int major) { this.major = (byte) major; @@ -67,8 +66,6 @@ public static Predicate onOrAfter(RestApiVersion restApiVersion) }; } - @UpdateForV9 - // Right now we return api version 8 for major version 9 until we bump the api version above public static RestApiVersion forMajor(int major) { switch (major) { case 7 -> { @@ -78,7 +75,7 @@ public static RestApiVersion forMajor(int major) { return V_8; } case 9 -> { - return V_8; + return V_9; } default -> throw new IllegalArgumentException("Unknown REST API version " + major); } diff --git a/libs/tdigest/build.gradle b/libs/tdigest/build.gradle index 771df2e83d85d..df60862b27386 100644 --- a/libs/tdigest/build.gradle +++ b/libs/tdigest/build.gradle @@ -22,6 +22,8 @@ apply plugin: 'elasticsearch.build' apply plugin: 'elasticsearch.publish' dependencies { + api project(':libs:elasticsearch-core') + testImplementation(project(":test:framework")) { exclude group: 'org.elasticsearch', module: 'elasticsearch-tdigest' } diff --git a/libs/tdigest/src/main/java/module-info.java b/libs/tdigest/src/main/java/module-info.java index 8edaff3f31d8c..cc7ff1810905f 100644 --- a/libs/tdigest/src/main/java/module-info.java +++ b/libs/tdigest/src/main/java/module-info.java @@ -18,6 +18,8 @@ */ module org.elasticsearch.tdigest { + requires org.elasticsearch.base; + exports org.elasticsearch.tdigest; exports org.elasticsearch.tdigest.arrays; } diff --git a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/AVLGroupTree.java b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/AVLGroupTree.java index 8528db2128729..a1a65e1e71cde 100644 --- a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/AVLGroupTree.java +++ b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/AVLGroupTree.java @@ -21,6 +21,8 @@ package org.elasticsearch.tdigest; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; import org.elasticsearch.tdigest.arrays.TDigestArrays; import org.elasticsearch.tdigest.arrays.TDigestDoubleArray; import org.elasticsearch.tdigest.arrays.TDigestLongArray; @@ -31,7 +33,7 @@ /** * A tree of t-digest centroids. 
*/ -final class AVLGroupTree extends AbstractCollection { +final class AVLGroupTree extends AbstractCollection implements Releasable { /* For insertions into the tree */ private double centroid; private long count; @@ -267,4 +269,8 @@ private void checkAggregates(int node) { } } + @Override + public void close() { + Releasables.close(centroids, counts, aggregatedCounts, tree); + } } diff --git a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/AVLTreeDigest.java b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/AVLTreeDigest.java index c28f86b9b8edc..f6b027edb1e9c 100644 --- a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/AVLTreeDigest.java +++ b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/AVLTreeDigest.java @@ -21,6 +21,7 @@ package org.elasticsearch.tdigest; +import org.elasticsearch.core.Releasables; import org.elasticsearch.tdigest.arrays.TDigestArrays; import java.util.Collection; @@ -153,26 +154,27 @@ public void compress() { } needsCompression = false; - AVLGroupTree centroids = summary; - this.summary = new AVLGroupTree(arrays); + try (AVLGroupTree centroids = summary) { + this.summary = new AVLGroupTree(arrays); - final int[] nodes = new int[centroids.size()]; - nodes[0] = centroids.first(); - for (int i = 1; i < nodes.length; ++i) { - nodes[i] = centroids.next(nodes[i - 1]); - assert nodes[i] != IntAVLTree.NIL; - } - assert centroids.next(nodes[nodes.length - 1]) == IntAVLTree.NIL; + final int[] nodes = new int[centroids.size()]; + nodes[0] = centroids.first(); + for (int i = 1; i < nodes.length; ++i) { + nodes[i] = centroids.next(nodes[i - 1]); + assert nodes[i] != IntAVLTree.NIL; + } + assert centroids.next(nodes[nodes.length - 1]) == IntAVLTree.NIL; - for (int i = centroids.size() - 1; i > 0; --i) { - final int other = gen.nextInt(i + 1); - final int tmp = nodes[other]; - nodes[other] = nodes[i]; - nodes[i] = tmp; - } + for (int i = centroids.size() - 1; i > 0; --i) { + final int other = gen.nextInt(i + 1); + final int tmp = nodes[other]; + nodes[other] = nodes[i]; + nodes[i] = tmp; + } - for (int node : nodes) { - add(centroids.mean(node), centroids.count(node)); + for (int node : nodes) { + add(centroids.mean(node), centroids.count(node)); + } } } @@ -356,4 +358,9 @@ public int byteSize() { compress(); return 64 + summary.size() * 13; } + + @Override + public void close() { + Releasables.close(summary); + } } diff --git a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/HybridDigest.java b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/HybridDigest.java index c28a99fbd6d44..8d03ce4e303a6 100644 --- a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/HybridDigest.java +++ b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/HybridDigest.java @@ -19,6 +19,7 @@ package org.elasticsearch.tdigest; +import org.elasticsearch.core.Releasables; import org.elasticsearch.tdigest.arrays.TDigestArrays; import java.util.Collection; @@ -110,6 +111,7 @@ public void reserve(long size) { } mergingDigest.reserve(size); // Release the allocated SortingDigest. 
+ sortingDigest.close(); sortingDigest = null; } else { sortingDigest.reserve(size); @@ -196,4 +198,9 @@ public int byteSize() { } return sortingDigest.byteSize(); } + + @Override + public void close() { + Releasables.close(sortingDigest, mergingDigest); + } } diff --git a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/IntAVLTree.java b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/IntAVLTree.java index cda8aecdb2ccc..b4a82257693d8 100644 --- a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/IntAVLTree.java +++ b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/IntAVLTree.java @@ -21,6 +21,8 @@ package org.elasticsearch.tdigest; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; import org.elasticsearch.tdigest.arrays.TDigestArrays; import org.elasticsearch.tdigest.arrays.TDigestByteArray; import org.elasticsearch.tdigest.arrays.TDigestIntArray; @@ -33,7 +35,7 @@ * want to add data to the nodes, typically by using arrays and node * identifiers as indices. */ -abstract class IntAVLTree { +abstract class IntAVLTree implements Releasable { /** * We use 0 instead of -1 so that left(NIL) works without * condition. @@ -586,4 +588,8 @@ int size() { } + @Override + public void close() { + Releasables.close(parent, left, right, depth); + } } diff --git a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/MergingDigest.java b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/MergingDigest.java index 1649af041ee19..f2ccfc33aa2a9 100644 --- a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/MergingDigest.java +++ b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/MergingDigest.java @@ -21,6 +21,7 @@ package org.elasticsearch.tdigest; +import org.elasticsearch.core.Releasables; import org.elasticsearch.tdigest.arrays.TDigestArrays; import org.elasticsearch.tdigest.arrays.TDigestDoubleArray; import org.elasticsearch.tdigest.arrays.TDigestIntArray; @@ -66,8 +67,6 @@ * what the AVLTreeDigest uses and no dynamic allocation is required at all. */ public class MergingDigest extends AbstractTDigest { - private final TDigestArrays arrays; - private int mergeCount = 0; private final double publicCompression; @@ -138,8 +137,6 @@ public MergingDigest(TDigestArrays arrays, double compression, int bufferSize) { * @param size Size of main buffer */ public MergingDigest(TDigestArrays arrays, double compression, int bufferSize, int size) { - this.arrays = arrays; - // ensure compression >= 10 // default size = 2 * ceil(compression) // default bufferSize = 5 * size @@ -274,9 +271,6 @@ private void merge( incomingWeight.set(incomingCount, weight, 0, lastUsedCell); incomingCount += lastUsedCell; - if (incomingOrder == null) { - incomingOrder = arrays.newIntArray(incomingCount); - } Sort.stableSort(incomingOrder, incomingMean, incomingCount); totalWeight += unmergedWeight; @@ -581,4 +575,9 @@ public String toString() { + "-" + (useTwoLevelCompression ? 
"twoLevel" : "oneLevel"); } + + @Override + public void close() { + Releasables.close(weight, mean, tempWeight, tempMean, order); + } } diff --git a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/SortingDigest.java b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/SortingDigest.java index 94b5c667e0672..f063ca9a511c6 100644 --- a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/SortingDigest.java +++ b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/SortingDigest.java @@ -19,6 +19,7 @@ package org.elasticsearch.tdigest; +import org.elasticsearch.core.Releasables; import org.elasticsearch.tdigest.arrays.TDigestArrays; import org.elasticsearch.tdigest.arrays.TDigestDoubleArray; @@ -137,4 +138,9 @@ public void reserve(long size) { public int byteSize() { return values.size() * 8; } + + @Override + public void close() { + Releasables.close(values); + } } diff --git a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/TDigest.java b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/TDigest.java index 4e79f9e68cd02..e578a688738cb 100644 --- a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/TDigest.java +++ b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/TDigest.java @@ -21,6 +21,7 @@ package org.elasticsearch.tdigest; +import org.elasticsearch.core.Releasable; import org.elasticsearch.tdigest.arrays.TDigestArrays; import java.util.Collection; @@ -37,7 +38,7 @@ * - test coverage roughly at 90% * - easy to adapt for use with map-reduce */ -public abstract class TDigest { +public abstract class TDigest implements Releasable { protected ScaleFunction scale = ScaleFunction.K_2; double min = Double.POSITIVE_INFINITY; double max = Double.NEGATIVE_INFINITY; diff --git a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/arrays/TDigestByteArray.java b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/arrays/TDigestByteArray.java index 481dde9784008..ae8e84800b433 100644 --- a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/arrays/TDigestByteArray.java +++ b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/arrays/TDigestByteArray.java @@ -21,10 +21,12 @@ package org.elasticsearch.tdigest.arrays; +import org.elasticsearch.core.Releasable; + /** * Minimal interface for ByteArray-like classes used within TDigest. */ -public interface TDigestByteArray { +public interface TDigestByteArray extends Releasable { int size(); byte get(int index); diff --git a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/arrays/TDigestDoubleArray.java b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/arrays/TDigestDoubleArray.java index 92530db5e7dc4..1699dbd9beaf1 100644 --- a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/arrays/TDigestDoubleArray.java +++ b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/arrays/TDigestDoubleArray.java @@ -21,10 +21,12 @@ package org.elasticsearch.tdigest.arrays; +import org.elasticsearch.core.Releasable; + /** * Minimal interface for DoubleArray-like classes used within TDigest. 
*/ -public interface TDigestDoubleArray { +public interface TDigestDoubleArray extends Releasable { int size(); double get(int index); diff --git a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/arrays/TDigestIntArray.java b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/arrays/TDigestIntArray.java index c944a4f8faf07..44e366aacd173 100644 --- a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/arrays/TDigestIntArray.java +++ b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/arrays/TDigestIntArray.java @@ -21,10 +21,12 @@ package org.elasticsearch.tdigest.arrays; +import org.elasticsearch.core.Releasable; + /** * Minimal interface for IntArray-like classes used within TDigest. */ -public interface TDigestIntArray { +public interface TDigestIntArray extends Releasable { int size(); int get(int index); diff --git a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/arrays/TDigestLongArray.java b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/arrays/TDigestLongArray.java index 7e75dd512e86d..5deea6b28b1ed 100644 --- a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/arrays/TDigestLongArray.java +++ b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/arrays/TDigestLongArray.java @@ -21,10 +21,12 @@ package org.elasticsearch.tdigest.arrays; +import org.elasticsearch.core.Releasable; + /** * Minimal interface for LongArray-like classes used within TDigest. */ -public interface TDigestLongArray { +public interface TDigestLongArray extends Releasable { int size(); long get(int index); diff --git a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/arrays/WrapperTDigestArrays.java b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/arrays/WrapperTDigestArrays.java deleted file mode 100644 index ce2dd4f8d8e1d..0000000000000 --- a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/arrays/WrapperTDigestArrays.java +++ /dev/null @@ -1,258 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - * - * This project is based on a modification of https://github.com/tdunning/t-digest which is licensed under the Apache 2.0 License. - */ - -package org.elasticsearch.tdigest.arrays; - -import java.util.Arrays; - -/** - * Temporal TDigestArrays with raw arrays. - * - *
<p>
- * Delete after the right implementation for BigArrays is made. - *
</p>
- */ -public class WrapperTDigestArrays implements TDigestArrays { - - public static final WrapperTDigestArrays INSTANCE = new WrapperTDigestArrays(); - - private WrapperTDigestArrays() {} - - @Override - public WrapperTDigestDoubleArray newDoubleArray(int initialCapacity) { - return new WrapperTDigestDoubleArray(initialCapacity); - } - - @Override - public WrapperTDigestIntArray newIntArray(int initialSize) { - return new WrapperTDigestIntArray(initialSize); - } - - @Override - public TDigestLongArray newLongArray(int initialSize) { - return new WrapperTDigestLongArray(initialSize); - } - - @Override - public TDigestByteArray newByteArray(int initialSize) { - return new WrapperTDigestByteArray(initialSize); - } - - public WrapperTDigestDoubleArray newDoubleArray(double[] array) { - return new WrapperTDigestDoubleArray(array); - } - - public WrapperTDigestIntArray newIntArray(int[] array) { - return new WrapperTDigestIntArray(array); - } - - public static class WrapperTDigestDoubleArray implements TDigestDoubleArray { - private double[] array; - private int size; - - public WrapperTDigestDoubleArray(int initialSize) { - this(new double[initialSize]); - } - - public WrapperTDigestDoubleArray(double[] array) { - this.array = array; - this.size = array.length; - } - - @Override - public int size() { - return size; - } - - @Override - public double get(int index) { - assert index >= 0 && index < size; - return array[index]; - } - - @Override - public void set(int index, double value) { - assert index >= 0 && index < size; - array[index] = value; - } - - @Override - public void add(double value) { - ensureCapacity(size + 1); - array[size++] = value; - } - - @Override - public void sort() { - Arrays.sort(array, 0, size); - } - - @Override - public void ensureCapacity(int requiredCapacity) { - if (requiredCapacity > array.length) { - int newSize = array.length + (array.length >> 1); - if (newSize < requiredCapacity) { - newSize = requiredCapacity; - } - double[] newArray = new double[newSize]; - System.arraycopy(array, 0, newArray, 0, size); - array = newArray; - } - } - - @Override - public void resize(int newSize) { - if (newSize > array.length) { - array = Arrays.copyOf(array, newSize); - } - if (newSize > size) { - Arrays.fill(array, size, newSize, 0); - } - size = newSize; - } - } - - public static class WrapperTDigestIntArray implements TDigestIntArray { - private int[] array; - private int size; - - public WrapperTDigestIntArray(int initialSize) { - this(new int[initialSize]); - } - - public WrapperTDigestIntArray(int[] array) { - this.array = array; - this.size = array.length; - } - - @Override - public int size() { - return size; - } - - @Override - public int get(int index) { - assert index >= 0 && index < size; - return array[index]; - } - - @Override - public void set(int index, int value) { - assert index >= 0 && index < size; - array[index] = value; - } - - @Override - public void resize(int newSize) { - if (newSize > array.length) { - array = Arrays.copyOf(array, newSize); - } - if (newSize > size) { - Arrays.fill(array, size, newSize, 0); - } - size = newSize; - } - } - - public static class WrapperTDigestLongArray implements TDigestLongArray { - private long[] array; - private int size; - - public WrapperTDigestLongArray(int initialSize) { - this(new long[initialSize]); - } - - public WrapperTDigestLongArray(long[] array) { - this.array = array; - this.size = array.length; - } - - @Override - public int size() { - return size; - } - - @Override - public long get(int index) { - 
assert index >= 0 && index < size; - return array[index]; - } - - @Override - public void set(int index, long value) { - assert index >= 0 && index < size; - array[index] = value; - } - - @Override - public void resize(int newSize) { - if (newSize > array.length) { - array = Arrays.copyOf(array, newSize); - } - if (newSize > size) { - Arrays.fill(array, size, newSize, 0); - } - size = newSize; - } - } - - public static class WrapperTDigestByteArray implements TDigestByteArray { - private byte[] array; - private int size; - - public WrapperTDigestByteArray(int initialSize) { - this(new byte[initialSize]); - } - - public WrapperTDigestByteArray(byte[] array) { - this.array = array; - this.size = array.length; - } - - @Override - public int size() { - return size; - } - - @Override - public byte get(int index) { - assert index >= 0 && index < size; - return array[index]; - } - - @Override - public void set(int index, byte value) { - assert index >= 0 && index < size; - array[index] = value; - } - - @Override - public void resize(int newSize) { - if (newSize > array.length) { - array = Arrays.copyOf(array, newSize); - } - if (newSize > size) { - Arrays.fill(array, size, newSize, (byte) 0); - } - size = newSize; - } - } -} diff --git a/libs/tdigest/src/test/java/org/elasticsearch/tdigest/AVLGroupTreeTests.java b/libs/tdigest/src/test/java/org/elasticsearch/tdigest/AVLGroupTreeTests.java index 71be849f401f4..7ac55afd87808 100644 --- a/libs/tdigest/src/test/java/org/elasticsearch/tdigest/AVLGroupTreeTests.java +++ b/libs/tdigest/src/test/java/org/elasticsearch/tdigest/AVLGroupTreeTests.java @@ -21,13 +21,10 @@ package org.elasticsearch.tdigest; -import org.elasticsearch.tdigest.arrays.WrapperTDigestArrays; -import org.elasticsearch.test.ESTestCase; - -public class AVLGroupTreeTests extends ESTestCase { +public class AVLGroupTreeTests extends TDigestTestCase { public void testSimpleAdds() { - AVLGroupTree x = new AVLGroupTree(WrapperTDigestArrays.INSTANCE); + AVLGroupTree x = new AVLGroupTree(arrays()); assertEquals(IntAVLTree.NIL, x.floor(34)); assertEquals(IntAVLTree.NIL, x.first()); assertEquals(IntAVLTree.NIL, x.last()); @@ -46,7 +43,7 @@ public void testSimpleAdds() { } public void testBalancing() { - AVLGroupTree x = new AVLGroupTree(WrapperTDigestArrays.INSTANCE); + AVLGroupTree x = new AVLGroupTree(arrays()); for (int i = 0; i < 101; i++) { x.add(new Centroid(i)); } @@ -60,7 +57,7 @@ public void testBalancing() { public void testFloor() { // mostly tested in other tests - AVLGroupTree x = new AVLGroupTree(WrapperTDigestArrays.INSTANCE); + AVLGroupTree x = new AVLGroupTree(arrays()); for (int i = 0; i < 101; i++) { x.add(new Centroid(i / 2)); } @@ -73,7 +70,7 @@ public void testFloor() { } public void testHeadSum() { - AVLGroupTree x = new AVLGroupTree(WrapperTDigestArrays.INSTANCE); + AVLGroupTree x = new AVLGroupTree(arrays()); for (int i = 0; i < 1000; ++i) { x.add(randomDouble(), randomIntBetween(1, 10)); } @@ -88,7 +85,7 @@ public void testHeadSum() { } public void testFloorSum() { - AVLGroupTree x = new AVLGroupTree(WrapperTDigestArrays.INSTANCE); + AVLGroupTree x = new AVLGroupTree(arrays()); int total = 0; for (int i = 0; i < 1000; ++i) { int count = randomIntBetween(1, 10); diff --git a/libs/tdigest/src/test/java/org/elasticsearch/tdigest/AVLTreeDigestTests.java b/libs/tdigest/src/test/java/org/elasticsearch/tdigest/AVLTreeDigestTests.java index 3cd89de4746f1..f6dde4e168291 100644 --- a/libs/tdigest/src/test/java/org/elasticsearch/tdigest/AVLTreeDigestTests.java +++ 
b/libs/tdigest/src/test/java/org/elasticsearch/tdigest/AVLTreeDigestTests.java @@ -21,13 +21,11 @@ package org.elasticsearch.tdigest; -import org.elasticsearch.tdigest.arrays.WrapperTDigestArrays; - public class AVLTreeDigestTests extends TDigestTests { protected DigestFactory factory(final double compression) { return () -> { - AVLTreeDigest digest = new AVLTreeDigest(WrapperTDigestArrays.INSTANCE, compression); + AVLTreeDigest digest = new AVLTreeDigest(arrays(), compression); digest.setRandomSeed(randomLong()); return digest; }; diff --git a/libs/tdigest/src/test/java/org/elasticsearch/tdigest/AlternativeMergeTests.java b/libs/tdigest/src/test/java/org/elasticsearch/tdigest/AlternativeMergeTests.java index 4b95e9c0ee695..0d095ec37fa45 100644 --- a/libs/tdigest/src/test/java/org/elasticsearch/tdigest/AlternativeMergeTests.java +++ b/libs/tdigest/src/test/java/org/elasticsearch/tdigest/AlternativeMergeTests.java @@ -21,15 +21,12 @@ package org.elasticsearch.tdigest; -import org.elasticsearch.tdigest.arrays.WrapperTDigestArrays; -import org.elasticsearch.test.ESTestCase; - import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Random; -public class AlternativeMergeTests extends ESTestCase { +public class AlternativeMergeTests extends TDigestTestCase { /** * Computes size using the alternative scaling limit for both an idealized merge and for * a MergingDigest. @@ -37,8 +34,8 @@ public class AlternativeMergeTests extends ESTestCase { public void testMerges() { for (int n : new int[] { 100, 1000, 10000, 100000 }) { for (double compression : new double[] { 50, 100, 200, 400 }) { - MergingDigest mergingDigest = new MergingDigest(WrapperTDigestArrays.INSTANCE, compression); - AVLTreeDigest treeDigest = new AVLTreeDigest(WrapperTDigestArrays.INSTANCE, compression); + MergingDigest mergingDigest = new MergingDigest(arrays(), compression); + AVLTreeDigest treeDigest = new AVLTreeDigest(arrays(), compression); List data = new ArrayList<>(); Random gen = random(); for (int i = 0; i < n; i++) { diff --git a/libs/tdigest/src/test/java/org/elasticsearch/tdigest/BigCountTests.java b/libs/tdigest/src/test/java/org/elasticsearch/tdigest/BigCountTests.java index 68b07f1096eea..7520d76172ef9 100644 --- a/libs/tdigest/src/test/java/org/elasticsearch/tdigest/BigCountTests.java +++ b/libs/tdigest/src/test/java/org/elasticsearch/tdigest/BigCountTests.java @@ -21,9 +21,7 @@ package org.elasticsearch.tdigest; -import org.elasticsearch.test.ESTestCase; - -public abstract class BigCountTests extends ESTestCase { +public abstract class BigCountTests extends TDigestTestCase { public void testBigMerge() { TDigest digest = createDigest(); diff --git a/libs/tdigest/src/test/java/org/elasticsearch/tdigest/BigCountTestsMergingDigestTests.java b/libs/tdigest/src/test/java/org/elasticsearch/tdigest/BigCountTestsMergingDigestTests.java index 25cd1af05a0ba..ab28628200cce 100644 --- a/libs/tdigest/src/test/java/org/elasticsearch/tdigest/BigCountTestsMergingDigestTests.java +++ b/libs/tdigest/src/test/java/org/elasticsearch/tdigest/BigCountTestsMergingDigestTests.java @@ -21,11 +21,9 @@ package org.elasticsearch.tdigest; -import org.elasticsearch.tdigest.arrays.WrapperTDigestArrays; - public class BigCountTestsMergingDigestTests extends BigCountTests { @Override public TDigest createDigest() { - return new MergingDigest(WrapperTDigestArrays.INSTANCE, 100); + return new MergingDigest(arrays(), 100); } } diff --git 
a/libs/tdigest/src/test/java/org/elasticsearch/tdigest/BigCountTestsTreeDigestTests.java b/libs/tdigest/src/test/java/org/elasticsearch/tdigest/BigCountTestsTreeDigestTests.java index a2cdf49d8f8ad..a9af82164c2ba 100644 --- a/libs/tdigest/src/test/java/org/elasticsearch/tdigest/BigCountTestsTreeDigestTests.java +++ b/libs/tdigest/src/test/java/org/elasticsearch/tdigest/BigCountTestsTreeDigestTests.java @@ -21,11 +21,9 @@ package org.elasticsearch.tdigest; -import org.elasticsearch.tdigest.arrays.WrapperTDigestArrays; - public class BigCountTestsTreeDigestTests extends BigCountTests { @Override public TDigest createDigest() { - return new AVLTreeDigest(WrapperTDigestArrays.INSTANCE, 100); + return new AVLTreeDigest(arrays(), 100); } } diff --git a/libs/tdigest/src/test/java/org/elasticsearch/tdigest/ComparisonTests.java b/libs/tdigest/src/test/java/org/elasticsearch/tdigest/ComparisonTests.java index f5df0c2f86ea1..82620459891ec 100644 --- a/libs/tdigest/src/test/java/org/elasticsearch/tdigest/ComparisonTests.java +++ b/libs/tdigest/src/test/java/org/elasticsearch/tdigest/ComparisonTests.java @@ -21,13 +21,10 @@ package org.elasticsearch.tdigest; -import org.elasticsearch.tdigest.arrays.WrapperTDigestArrays; -import org.elasticsearch.test.ESTestCase; - import java.util.Arrays; import java.util.function.Supplier; -public class ComparisonTests extends ESTestCase { +public class ComparisonTests extends TDigestTestCase { private static final int SAMPLE_COUNT = 1_000_000; @@ -40,10 +37,10 @@ public class ComparisonTests extends ESTestCase { private void loadData(Supplier sampleGenerator) { final int COMPRESSION = 100; - avlTreeDigest = TDigest.createAvlTreeDigest(WrapperTDigestArrays.INSTANCE, COMPRESSION); - mergingDigest = TDigest.createMergingDigest(WrapperTDigestArrays.INSTANCE, COMPRESSION); - sortingDigest = TDigest.createSortingDigest(WrapperTDigestArrays.INSTANCE); - hybridDigest = TDigest.createHybridDigest(WrapperTDigestArrays.INSTANCE, COMPRESSION); + avlTreeDigest = TDigest.createAvlTreeDigest(arrays(), COMPRESSION); + mergingDigest = TDigest.createMergingDigest(arrays(), COMPRESSION); + sortingDigest = TDigest.createSortingDigest(arrays()); + hybridDigest = TDigest.createHybridDigest(arrays(), COMPRESSION); samples = new double[SAMPLE_COUNT]; for (int i = 0; i < SAMPLE_COUNT; i++) { diff --git a/libs/tdigest/src/test/java/org/elasticsearch/tdigest/IntAVLTreeTests.java b/libs/tdigest/src/test/java/org/elasticsearch/tdigest/IntAVLTreeTests.java index 58c91ae6e03e6..5178701e96c2c 100644 --- a/libs/tdigest/src/test/java/org/elasticsearch/tdigest/IntAVLTreeTests.java +++ b/libs/tdigest/src/test/java/org/elasticsearch/tdigest/IntAVLTreeTests.java @@ -21,8 +21,7 @@ package org.elasticsearch.tdigest; -import org.elasticsearch.tdigest.arrays.WrapperTDigestArrays; -import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.tdigest.arrays.TDigestArrays; import java.util.Arrays; import java.util.Iterator; @@ -30,7 +29,7 @@ import java.util.Random; import java.util.TreeMap; -public class IntAVLTreeTests extends ESTestCase { +public class IntAVLTreeTests extends TDigestTestCase { static class IntegerBag extends IntAVLTree { @@ -38,8 +37,8 @@ static class IntegerBag extends IntAVLTree { int[] values; int[] counts; - IntegerBag() { - super(WrapperTDigestArrays.INSTANCE); + IntegerBag(TDigestArrays arrays) { + super(arrays); values = new int[capacity()]; counts = new int[capacity()]; } @@ -89,7 +88,7 @@ protected void merge(int node) { public void testDualAdd() { Random r = random(); 
TreeMap map = new TreeMap<>(); - IntegerBag bag = new IntegerBag(); + IntegerBag bag = new IntegerBag(arrays()); for (int i = 0; i < 100000; ++i) { final int v = r.nextInt(100000); if (map.containsKey(v)) { @@ -112,7 +111,7 @@ public void testDualAdd() { public void testDualAddRemove() { Random r = random(); TreeMap map = new TreeMap<>(); - IntegerBag bag = new IntegerBag(); + IntegerBag bag = new IntegerBag(arrays()); for (int i = 0; i < 100000; ++i) { final int v = r.nextInt(1000); if (r.nextBoolean()) { diff --git a/libs/tdigest/src/test/java/org/elasticsearch/tdigest/MedianTests.java b/libs/tdigest/src/test/java/org/elasticsearch/tdigest/MedianTests.java index dd455b307344e..c8acec935c040 100644 --- a/libs/tdigest/src/test/java/org/elasticsearch/tdigest/MedianTests.java +++ b/libs/tdigest/src/test/java/org/elasticsearch/tdigest/MedianTests.java @@ -21,14 +21,11 @@ package org.elasticsearch.tdigest; -import org.elasticsearch.tdigest.arrays.WrapperTDigestArrays; -import org.elasticsearch.test.ESTestCase; - -public class MedianTests extends ESTestCase { +public class MedianTests extends TDigestTestCase { public void testAVL() { double[] data = new double[] { 7, 15, 36, 39, 40, 41 }; - TDigest digest = new AVLTreeDigest(WrapperTDigestArrays.INSTANCE, 100); + TDigest digest = new AVLTreeDigest(arrays(), 100); for (double value : data) { digest.add(value); } @@ -39,7 +36,7 @@ public void testAVL() { public void testMergingDigest() { double[] data = new double[] { 7, 15, 36, 39, 40, 41 }; - TDigest digest = new MergingDigest(WrapperTDigestArrays.INSTANCE, 100); + TDigest digest = new MergingDigest(arrays(), 100); for (double value : data) { digest.add(value); } @@ -50,7 +47,7 @@ public void testMergingDigest() { public void testSortingDigest() { double[] data = new double[] { 7, 15, 36, 39, 40, 41 }; - TDigest digest = new SortingDigest(WrapperTDigestArrays.INSTANCE); + TDigest digest = new SortingDigest(arrays()); for (double value : data) { digest.add(value); } @@ -61,7 +58,7 @@ public void testSortingDigest() { public void testHybridDigest() { double[] data = new double[] { 7, 15, 36, 39, 40, 41 }; - TDigest digest = new HybridDigest(WrapperTDigestArrays.INSTANCE, 100); + TDigest digest = new HybridDigest(arrays(), 100); for (double value : data) { digest.add(value); } diff --git a/libs/tdigest/src/test/java/org/elasticsearch/tdigest/SortTests.java b/libs/tdigest/src/test/java/org/elasticsearch/tdigest/SortTests.java index 7327dfb5aac3c..425e4d1497eda 100644 --- a/libs/tdigest/src/test/java/org/elasticsearch/tdigest/SortTests.java +++ b/libs/tdigest/src/test/java/org/elasticsearch/tdigest/SortTests.java @@ -22,22 +22,20 @@ package org.elasticsearch.tdigest; import org.elasticsearch.tdigest.arrays.TDigestIntArray; -import org.elasticsearch.tdigest.arrays.WrapperTDigestArrays; -import org.elasticsearch.test.ESTestCase; import java.util.HashMap; import java.util.Map; import java.util.Random; -public class SortTests extends ESTestCase { +public class SortTests extends TDigestTestCase { public void testReverse() { - TDigestIntArray x = WrapperTDigestArrays.INSTANCE.newIntArray(0); + TDigestIntArray x = arrays().newIntArray(0); // don't crash with no input Sort.reverse(x, 0, x.size()); // reverse stuff! 
- x = WrapperTDigestArrays.INSTANCE.newIntArray(new int[] { 1, 2, 3, 4, 5 }); + x = arrays().newIntArray(new int[] { 1, 2, 3, 4, 5 }); Sort.reverse(x, 0, x.size()); for (int i = 0; i < 5; i++) { assertEquals(5 - i, x.get(i)); @@ -59,7 +57,7 @@ public void testReverse() { assertEquals(4, x.get(3)); assertEquals(1, x.get(4)); - x = WrapperTDigestArrays.INSTANCE.newIntArray(new int[] { 1, 2, 3, 4, 5, 6 }); + x = arrays().newIntArray(new int[] { 1, 2, 3, 4, 5, 6 }); Sort.reverse(x, 0, x.size()); for (int i = 0; i < 6; i++) { assertEquals(6 - i, x.get(i)); @@ -229,8 +227,8 @@ private void checkOrder(int[] order, double[] values) { } private void sort(int[] order, double[] values, int n) { - var wrappedOrder = WrapperTDigestArrays.INSTANCE.newIntArray(order); - var wrappedValues = WrapperTDigestArrays.INSTANCE.newDoubleArray(values); + var wrappedOrder = arrays().newIntArray(order); + var wrappedValues = arrays().newDoubleArray(values); Sort.stableSort(wrappedOrder, wrappedValues, n); } diff --git a/libs/tdigest/src/test/java/org/elasticsearch/tdigest/TDigestTestCase.java b/libs/tdigest/src/test/java/org/elasticsearch/tdigest/TDigestTestCase.java new file mode 100644 index 0000000000000..76db01d5dd0bf --- /dev/null +++ b/libs/tdigest/src/test/java/org/elasticsearch/tdigest/TDigestTestCase.java @@ -0,0 +1,109 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * + * This project is based on a modification of https://github.com/tdunning/t-digest which is licensed under the Apache 2.0 License. + */ + +package org.elasticsearch.tdigest; + +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.search.aggregations.metrics.MemoryTrackingTDigestArrays; +import org.elasticsearch.tdigest.arrays.TDigestArrays; +import org.elasticsearch.tdigest.arrays.TDigestByteArray; +import org.elasticsearch.tdigest.arrays.TDigestDoubleArray; +import org.elasticsearch.tdigest.arrays.TDigestIntArray; +import org.elasticsearch.tdigest.arrays.TDigestLongArray; +import org.elasticsearch.test.ESTestCase; +import org.junit.After; + +import java.util.Collection; +import java.util.concurrent.ConcurrentHashMap; + +/** + * Base class for TDigest tests that require {@link TDigestArrays} instances. + *
<p>
+ * This class provides arrays that will be automatically closed after the test. + * It will also test that all memory has been freed, as the arrays use a counting CircuitBreaker. + *
</p>
+ */ +public abstract class TDigestTestCase extends ESTestCase { + private final Collection trackedArrays = ConcurrentHashMap.newKeySet(); + + /** + * Create a new TDigestArrays instance with a limited breaker. This method may be called multiple times. + * + *
<p>
+ * The arrays created by this method will be automatically released after the test. + *
</p>
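+ * For example, a test could use it like this (a usage sketch, not part of this change; it relies on {@code TDigest} being releasable):
+ * <pre>{@code
+ * try (TDigest digest = TDigest.createMergingDigest(arrays(), 100)) {
+ *     digest.add(42.0);
+ * }
+ * }</pre>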
+ */ + protected DelegatingTDigestArrays arrays() { + return new DelegatingTDigestArrays(); + } + + /** + * Release all arrays before {@link ESTestCase} checks for unreleased bytes. + */ + @After + public void releaseArrays() { + Releasables.close(trackedArrays); + trackedArrays.clear(); + } + + private T register(T releasable) { + trackedArrays.add(releasable); + return releasable; + } + + protected final class DelegatingTDigestArrays implements TDigestArrays { + private final MemoryTrackingTDigestArrays delegate; + + DelegatingTDigestArrays() { + this.delegate = new MemoryTrackingTDigestArrays(newLimitedBreaker(ByteSizeValue.ofMb(100))); + } + + public TDigestDoubleArray newDoubleArray(double[] data) { + return register(delegate.newDoubleArray(data)); + } + + @Override + public TDigestDoubleArray newDoubleArray(int size) { + return register(delegate.newDoubleArray(size)); + } + + public TDigestIntArray newIntArray(int[] data) { + return register(delegate.newIntArray(data)); + } + + @Override + public TDigestIntArray newIntArray(int size) { + return register(delegate.newIntArray(size)); + } + + @Override + public TDigestLongArray newLongArray(int size) { + return register(delegate.newLongArray(size)); + } + + @Override + public TDigestByteArray newByteArray(int initialSize) { + return register(delegate.newByteArray(initialSize)); + } + } +} diff --git a/libs/tdigest/src/test/java/org/elasticsearch/tdigest/TDigestTests.java b/libs/tdigest/src/test/java/org/elasticsearch/tdigest/TDigestTests.java index 43f1e36afb314..89a0c037dc864 100644 --- a/libs/tdigest/src/test/java/org/elasticsearch/tdigest/TDigestTests.java +++ b/libs/tdigest/src/test/java/org/elasticsearch/tdigest/TDigestTests.java @@ -21,10 +21,6 @@ package org.elasticsearch.tdigest; -import org.elasticsearch.tdigest.arrays.TDigestArrays; -import org.elasticsearch.tdigest.arrays.WrapperTDigestArrays; -import org.elasticsearch.test.ESTestCase; - import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -34,7 +30,7 @@ /** * Base test case for TDigests, just extend this class and implement the abstract methods. */ -public abstract class TDigestTests extends ESTestCase { +public abstract class TDigestTests extends TDigestTestCase { public interface DigestFactory { TDigest create(); @@ -544,8 +540,4 @@ public void testMonotonicity() { lastQuantile = q; } } - - protected static TDigestArrays arrays() { - return WrapperTDigestArrays.INSTANCE; - } } diff --git a/libs/x-content/src/main/java/org/elasticsearch/xcontent/MediaTypeRegistry.java b/libs/x-content/src/main/java/org/elasticsearch/xcontent/MediaTypeRegistry.java index 88724062bb452..ae2c80a136437 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/xcontent/MediaTypeRegistry.java +++ b/libs/x-content/src/main/java/org/elasticsearch/xcontent/MediaTypeRegistry.java @@ -27,7 +27,7 @@ * A MediaType can have only one query parameter representation. * For example "json" (case insensitive) maps back to a JSON media type. * - * Additionally, a http header may optionally have parameters. For example "application/vnd.elasticsearch+json; compatible-with=7". + * Additionally, a http header may optionally have parameters. For example "application/vnd.elasticsearch+json; compatible-with=8". * This class also allows to define a regular expression for valid values of charset. 
*/ public class MediaTypeRegistry { diff --git a/libs/x-content/src/test/java/org/elasticsearch/xcontent/ParsedMediaTypeTests.java b/libs/x-content/src/test/java/org/elasticsearch/xcontent/ParsedMediaTypeTests.java index 9fafd7c7e7150..8cecd3d25201f 100644 --- a/libs/x-content/src/test/java/org/elasticsearch/xcontent/ParsedMediaTypeTests.java +++ b/libs/x-content/src/test/java/org/elasticsearch/xcontent/ParsedMediaTypeTests.java @@ -30,19 +30,19 @@ public void testCanonicalParsing() { assertThat(ParsedMediaType.parseMediaType("application/cbor").toMediaType(mediaTypeRegistry), equalTo(XContentType.CBOR)); assertThat( - ParsedMediaType.parseMediaType("application/vnd.elasticsearch+json;compatible-with=7").toMediaType(mediaTypeRegistry), + ParsedMediaType.parseMediaType("application/vnd.elasticsearch+json;compatible-with=8").toMediaType(mediaTypeRegistry), equalTo(XContentType.VND_JSON) ); assertThat( - ParsedMediaType.parseMediaType("application/vnd.elasticsearch+yaml;compatible-with=7").toMediaType(mediaTypeRegistry), + ParsedMediaType.parseMediaType("application/vnd.elasticsearch+yaml;compatible-with=8").toMediaType(mediaTypeRegistry), equalTo(XContentType.VND_YAML) ); assertThat( - ParsedMediaType.parseMediaType("application/vnd.elasticsearch+smile;compatible-with=7").toMediaType(mediaTypeRegistry), + ParsedMediaType.parseMediaType("application/vnd.elasticsearch+smile;compatible-with=8").toMediaType(mediaTypeRegistry), equalTo(XContentType.VND_SMILE) ); assertThat( - ParsedMediaType.parseMediaType("application/vnd.elasticsearch+cbor;compatible-with=7").toMediaType(mediaTypeRegistry), + ParsedMediaType.parseMediaType("application/vnd.elasticsearch+cbor;compatible-with=8").toMediaType(mediaTypeRegistry), equalTo(XContentType.VND_CBOR) ); } @@ -179,19 +179,19 @@ public void testParseMediaTypeFromXContentType() { ); assertThat( - ParsedMediaType.parseMediaType(XContentType.VND_JSON, Map.of("compatible-with", "7")).toMediaType(mediaTypeRegistry), + ParsedMediaType.parseMediaType(XContentType.VND_JSON, Map.of("compatible-with", "8")).toMediaType(mediaTypeRegistry), equalTo(XContentType.VND_JSON) ); assertThat( - ParsedMediaType.parseMediaType(XContentType.VND_YAML, Map.of("compatible-with", "7")).toMediaType(mediaTypeRegistry), + ParsedMediaType.parseMediaType(XContentType.VND_YAML, Map.of("compatible-with", "8")).toMediaType(mediaTypeRegistry), equalTo(XContentType.VND_YAML) ); assertThat( - ParsedMediaType.parseMediaType(XContentType.VND_SMILE, Map.of("compatible-with", "7")).toMediaType(mediaTypeRegistry), + ParsedMediaType.parseMediaType(XContentType.VND_SMILE, Map.of("compatible-with", "8")).toMediaType(mediaTypeRegistry), equalTo(XContentType.VND_SMILE) ); assertThat( - ParsedMediaType.parseMediaType(XContentType.VND_CBOR, Map.of("compatible-with", "7")).toMediaType(mediaTypeRegistry), + ParsedMediaType.parseMediaType(XContentType.VND_CBOR, Map.of("compatible-with", "8")).toMediaType(mediaTypeRegistry), equalTo(XContentType.VND_CBOR) ); } @@ -215,20 +215,20 @@ public void testResponseContentTypeHeader() { ); assertThat( - ParsedMediaType.parseMediaType(XContentType.VND_JSON, Map.of("compatible-with", "7")).responseContentTypeHeader(), - equalTo("application/vnd.elasticsearch+json;compatible-with=7") + ParsedMediaType.parseMediaType(XContentType.VND_JSON, Map.of("compatible-with", "8")).responseContentTypeHeader(), + equalTo("application/vnd.elasticsearch+json;compatible-with=8") ); assertThat( - ParsedMediaType.parseMediaType(XContentType.VND_YAML, Map.of("compatible-with", 
"7")).responseContentTypeHeader(), - equalTo("application/vnd.elasticsearch+yaml;compatible-with=7") + ParsedMediaType.parseMediaType(XContentType.VND_YAML, Map.of("compatible-with", "8")).responseContentTypeHeader(), + equalTo("application/vnd.elasticsearch+yaml;compatible-with=8") ); assertThat( - ParsedMediaType.parseMediaType(XContentType.VND_SMILE, Map.of("compatible-with", "7")).responseContentTypeHeader(), - equalTo("application/vnd.elasticsearch+smile;compatible-with=7") + ParsedMediaType.parseMediaType(XContentType.VND_SMILE, Map.of("compatible-with", "8")).responseContentTypeHeader(), + equalTo("application/vnd.elasticsearch+smile;compatible-with=8") ); assertThat( - ParsedMediaType.parseMediaType(XContentType.VND_CBOR, Map.of("compatible-with", "7")).responseContentTypeHeader(), - equalTo("application/vnd.elasticsearch+cbor;compatible-with=7") + ParsedMediaType.parseMediaType(XContentType.VND_CBOR, Map.of("compatible-with", "8")).responseContentTypeHeader(), + equalTo("application/vnd.elasticsearch+cbor;compatible-with=8") ); assertThat( diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ASCIIFoldingTokenFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ASCIIFoldingTokenFilterFactoryTests.java index a301bc1c851a7..c3017bd3ea237 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ASCIIFoldingTokenFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ASCIIFoldingTokenFilterFactoryTests.java @@ -21,6 +21,8 @@ import java.io.IOException; import java.io.StringReader; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertTokenStreamContents; + public class ASCIIFoldingTokenFilterFactoryTests extends ESTokenStreamTestCase { public void testDefault() throws IOException { ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings( diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/BaseWordDelimiterTokenFilterFactoryTestCase.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/BaseWordDelimiterTokenFilterFactoryTestCase.java index 001f54ee238d4..ab26112005bd6 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/BaseWordDelimiterTokenFilterFactoryTestCase.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/BaseWordDelimiterTokenFilterFactoryTestCase.java @@ -20,6 +20,8 @@ import java.io.IOException; import java.io.StringReader; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertTokenStreamContents; + /** * Base class to test {@link WordDelimiterTokenFilterFactory} and * {@link WordDelimiterGraphTokenFilterFactory}. 
diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CJKFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CJKFilterFactoryTests.java index 16614f056c05a..95b093b03f9a7 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CJKFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CJKFilterFactoryTests.java @@ -22,6 +22,8 @@ import java.io.IOException; import java.io.StringReader; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertTokenStreamContents; + public class CJKFilterFactoryTests extends ESTokenStreamTestCase { private static final String RESOURCE = "/org/elasticsearch/analysis/common/cjk_analysis.json"; diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CharGroupTokenizerFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CharGroupTokenizerFactoryTests.java index d5a36b110a7be..ee9701b89c127 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CharGroupTokenizerFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CharGroupTokenizerFactoryTests.java @@ -24,6 +24,8 @@ import java.io.StringReader; import java.util.Arrays; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertTokenStreamContents; + public class CharGroupTokenizerFactoryTests extends ESTokenStreamTestCase { public void testParseTokenChars() { diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonGramsTokenFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonGramsTokenFilterFactoryTests.java index 115ed1522381a..77902a2ab982f 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonGramsTokenFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonGramsTokenFilterFactoryTests.java @@ -27,6 +27,8 @@ import java.nio.file.Files; import java.nio.file.Path; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertTokenStreamContents; + public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase { public void testDefault() throws IOException { Settings settings = Settings.builder() diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ESSolrSynonymParserTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ESSolrSynonymParserTests.java index db8b5c92165a2..b0cda5620c3e5 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ESSolrSynonymParserTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ESSolrSynonymParserTests.java @@ -23,6 +23,7 @@ import java.io.StringReader; import java.text.ParseException; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertTokenStreamContents; import static org.hamcrest.Matchers.containsString; public class ESSolrSynonymParserTests extends ESTokenStreamTestCase { diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ESWordnetSynonymParserTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ESWordnetSynonymParserTests.java index 16c6aa256009b..17455c431992f 100644 --- 
a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ESWordnetSynonymParserTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ESWordnetSynonymParserTests.java @@ -23,6 +23,7 @@ import java.io.StringReader; import java.text.ParseException; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertTokenStreamContents; import static org.hamcrest.Matchers.containsString; public class ESWordnetSynonymParserTests extends ESTokenStreamTestCase { diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/EdgeNGramTokenFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/EdgeNGramTokenFilterFactoryTests.java index c4e695cabf695..446cee8f48379 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/EdgeNGramTokenFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/EdgeNGramTokenFilterFactoryTests.java @@ -21,6 +21,8 @@ import java.io.IOException; import java.io.StringReader; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertTokenStreamContents; + public class EdgeNGramTokenFilterFactoryTests extends ESTokenStreamTestCase { public void testDefault() throws IOException { diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerTests.java index 329318a096efb..11d1653439e59 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerTests.java @@ -29,6 +29,9 @@ import java.io.StringReader; import java.util.Collections; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertAnalyzesTo; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertTokenStreamContents; + public class EdgeNGramTokenizerTests extends ESTokenStreamTestCase { private static IndexAnalyzers buildAnalyzers(IndexVersion version, String tokenizer) throws IOException { diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/FingerprintAnalyzerTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/FingerprintAnalyzerTests.java index 8049c09025cf2..8783860b8e02e 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/FingerprintAnalyzerTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/FingerprintAnalyzerTests.java @@ -13,6 +13,8 @@ import org.apache.lucene.analysis.CharArraySet; import org.elasticsearch.test.ESTokenStreamTestCase; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertAnalyzesTo; + public class FingerprintAnalyzerTests extends ESTokenStreamTestCase { public void testFingerprint() throws Exception { diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/FlattenGraphTokenFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/FlattenGraphTokenFilterFactoryTests.java index d6c2792af7de7..2f3dd1917ebe2 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/FlattenGraphTokenFilterFactoryTests.java +++ 
b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/FlattenGraphTokenFilterFactoryTests.java @@ -20,6 +20,8 @@ import java.io.IOException; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertTokenStreamContents; + public class FlattenGraphTokenFilterFactoryTests extends ESTokenStreamTestCase { public void testBasic() throws IOException { diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepFilterFactoryTests.java index 5d84457df1495..a3c9eb2cf3aae 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepFilterFactoryTests.java @@ -22,6 +22,7 @@ import java.io.IOException; import java.io.StringReader; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertTokenStreamContents; import static org.hamcrest.Matchers.instanceOf; public class KeepFilterFactoryTests extends ESTokenStreamTestCase { diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepTypesFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepTypesFilterFactoryTests.java index 5a7ead779621e..e499f6f7eebdc 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepTypesFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepTypesFilterFactoryTests.java @@ -21,6 +21,7 @@ import java.io.IOException; import java.io.StringReader; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertTokenStreamContents; import static org.hamcrest.Matchers.instanceOf; public class KeepTypesFilterFactoryTests extends ESTokenStreamTestCase { diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeywordMarkerFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeywordMarkerFilterFactoryTests.java index c249db706a189..8ede58ae2082b 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeywordMarkerFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeywordMarkerFilterFactoryTests.java @@ -18,11 +18,11 @@ import org.elasticsearch.index.analysis.AnalysisTestsHelper; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.analysis.TokenFilterFactory; -import org.elasticsearch.test.ESTestCase.TestAnalysis; import org.elasticsearch.test.ESTokenStreamTestCase; import java.io.IOException; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertAnalyzesTo; import static org.hamcrest.Matchers.instanceOf; /** diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/LimitTokenCountFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/LimitTokenCountFilterFactoryTests.java index c3a9531b4a2ed..ee117de653d95 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/LimitTokenCountFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/LimitTokenCountFilterFactoryTests.java @@ -21,6 +21,8 @@ import java.io.IOException; import java.io.StringReader; +import static 
org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertTokenStreamContents; + public class LimitTokenCountFilterFactoryTests extends ESTokenStreamTestCase { public void testDefault() throws IOException { Settings settings = Settings.builder() diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/MinHashFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/MinHashFilterFactoryTests.java index aff05dbc4d3a3..020b78a50b213 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/MinHashFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/MinHashFilterFactoryTests.java @@ -21,6 +21,8 @@ import java.io.IOException; import java.io.StringReader; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertStreamHasNumberOfTokens; + public class MinHashFilterFactoryTests extends ESTokenStreamTestCase { public void testDefault() throws IOException { int default_hash_count = 1; diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/MultiplexerTokenFilterTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/MultiplexerTokenFilterTests.java index 7436263f8df9e..eb9032061d134 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/MultiplexerTokenFilterTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/MultiplexerTokenFilterTests.java @@ -26,6 +26,8 @@ import java.io.IOException; import java.util.Collections; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertAnalyzesTo; + public class MultiplexerTokenFilterTests extends ESTokenStreamTestCase { public void testMultiplexingFilter() throws IOException { diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/NGramTokenFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/NGramTokenFilterFactoryTests.java index 4b0232ed95e0e..ef02f91c30a40 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/NGramTokenFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/NGramTokenFilterFactoryTests.java @@ -21,6 +21,8 @@ import java.io.IOException; import java.io.StringReader; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertTokenStreamContents; + public class NGramTokenFilterFactoryTests extends ESTokenStreamTestCase { public void testDefault() throws IOException { diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/NGramTokenizerFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/NGramTokenizerFactoryTests.java index 9c4286d40db77..8c365a1362f85 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/NGramTokenizerFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/NGramTokenizerFactoryTests.java @@ -28,7 +28,7 @@ import java.io.StringReader; import java.util.Arrays; -import static com.carrotsearch.randomizedtesting.RandomizedTest.scaledRandomIntBetween; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertTokenStreamContents; import static org.hamcrest.Matchers.instanceOf; public class NGramTokenizerFactoryTests extends ESTokenStreamTestCase { @@ -183,6 +183,9 @@ public void 
testBackwardsCompatibilityEdgeNgramTokenFilter() throws Exception { assertThat(edgeNGramTokenFilter, instanceOf(EdgeNGramTokenFilter.class)); } } + assertWarnings( + "The [side] parameter is deprecated and will be removed. Use a [reverse] before and after the [edge_ngram] instead." + ); } /* diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PathHierarchyTokenizerFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PathHierarchyTokenizerFactoryTests.java index b36bb18529109..5121c6390ceb0 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PathHierarchyTokenizerFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PathHierarchyTokenizerFactoryTests.java @@ -20,6 +20,8 @@ import java.io.IOException; import java.io.StringReader; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertTokenStreamContents; + public class PathHierarchyTokenizerFactoryTests extends ESTokenStreamTestCase { public void testDefaults() throws IOException { diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PatternAnalyzerTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PatternAnalyzerTests.java index 6c13c4eac4ab7..91fd74bca9c93 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PatternAnalyzerTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PatternAnalyzerTests.java @@ -18,6 +18,9 @@ import java.util.Arrays; import java.util.regex.Pattern; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertAnalyzesTo; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.checkRandomData; + /** * Verifies the behavior of PatternAnalyzer.
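The assertWarnings message above spells out the documented migration path for the removed [side] option: reverse the token stream, apply a front edge_ngram, then reverse again. A minimal sketch of the equivalent analyzer settings, written as a Java text block in the style of these tests (the filter and analyzer names are illustrative, not taken from this change):

// Sketch only: replacement for the deprecated [side: back] edge_ngram option.
// "back_edge_ngrams" and "back_edge_analyzer" are hypothetical names.
static final String BACK_EDGE_NGRAM_SETTINGS = """
    {
      "analysis": {
        "filter": {
          "back_edge_ngrams": { "type": "edge_ngram", "min_gram": 1, "max_gram": 3 }
        },
        "analyzer": {
          "back_edge_analyzer": {
            "type": "custom",
            "tokenizer": "standard",
            "filter": [ "reverse", "back_edge_ngrams", "reverse" ]
          }
        }
      }
    }""";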
*/ diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PatternCaptureTokenFilterTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PatternCaptureTokenFilterTests.java index 4ac4b44d8ffdd..80a270b033678 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PatternCaptureTokenFilterTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PatternCaptureTokenFilterTests.java @@ -19,7 +19,7 @@ import org.elasticsearch.test.ESTokenStreamTestCase; import org.elasticsearch.test.IndexSettingsModule; -import static org.elasticsearch.test.ESTestCase.createTestAnalysis; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertTokenStreamContents; import static org.hamcrest.Matchers.containsString; public class PatternCaptureTokenFilterTests extends ESTokenStreamTestCase { diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PatternReplaceTokenFilterTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PatternReplaceTokenFilterTests.java index 48434461fc151..91637f1cb9449 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PatternReplaceTokenFilterTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PatternReplaceTokenFilterTests.java @@ -19,6 +19,8 @@ import java.io.IOException; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertTokenStreamContents; + public class PatternReplaceTokenFilterTests extends ESTokenStreamTestCase { public void testNormalizer() throws IOException { diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PredicateTokenScriptFilterTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PredicateTokenScriptFilterTests.java index ae8c17decb3b7..40ba9acbc257a 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PredicateTokenScriptFilterTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PredicateTokenScriptFilterTests.java @@ -37,6 +37,7 @@ import java.io.IOException; import java.util.Collections; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertAnalyzesTo; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/RemoveDuplicatesFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/RemoveDuplicatesFilterFactoryTests.java index df0c0aa6e7df6..bb37b9bb7f4ef 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/RemoveDuplicatesFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/RemoveDuplicatesFilterFactoryTests.java @@ -20,6 +20,7 @@ import java.io.IOException; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertTokenStreamContents; import static org.hamcrest.Matchers.instanceOf; public class RemoveDuplicatesFilterFactoryTests extends ESTokenStreamTestCase { diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterTests.java index 2a480f7cb4a75..fb5eee96acffb 100644 --- 
a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterTests.java @@ -36,6 +36,7 @@ import java.util.Collections; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertAnalyzesTo; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/SnowballAnalyzerTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/SnowballAnalyzerTests.java index 9153b5d9b3819..8fd8b86047488 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/SnowballAnalyzerTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/SnowballAnalyzerTests.java @@ -13,6 +13,8 @@ import org.apache.lucene.analysis.en.EnglishAnalyzer; import org.elasticsearch.test.ESTokenStreamTestCase; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertAnalyzesTo; + public class SnowballAnalyzerTests extends ESTokenStreamTestCase { public void testEnglish() throws Exception { diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/StemmerOverrideTokenFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/StemmerOverrideTokenFilterFactoryTests.java index 2266d554fcba6..bbe22ff43d52e 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/StemmerOverrideTokenFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/StemmerOverrideTokenFilterFactoryTests.java @@ -25,6 +25,8 @@ import java.util.List; import java.util.Locale; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertTokenStreamContents; + public class StemmerOverrideTokenFilterFactoryTests extends ESTokenStreamTestCase { @Rule public ExpectedException expectedException = ExpectedException.none(); diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactoryTests.java index f9d8dc1aef8ff..a1c95deb65a52 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactoryTests.java @@ -27,7 +27,7 @@ import java.io.IOException; import java.io.StringReader; -import static com.carrotsearch.randomizedtesting.RandomizedTest.scaledRandomIntBetween; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertAnalyzesTo; import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_VERSION_CREATED; import static org.hamcrest.Matchers.instanceOf; diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/StopAnalyzerTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/StopAnalyzerTests.java index db25d6a0f1845..da84c4814514f 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/StopAnalyzerTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/StopAnalyzerTests.java @@ -19,7 +19,7 @@ import org.elasticsearch.test.ESTokenStreamTestCase; import 
org.elasticsearch.test.IndexSettingsModule; -import static org.elasticsearch.test.ESTestCase.createTestAnalysis; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertTokenStreamContents; public class StopAnalyzerTests extends ESTokenStreamTestCase { public void testDefaultsCompoundAnalysis() throws Exception { diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/TrimTokenFilterTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/TrimTokenFilterTests.java index f3816f43d2b2b..63e9732f99a8a 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/TrimTokenFilterTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/TrimTokenFilterTests.java @@ -19,6 +19,8 @@ import java.io.IOException; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertTokenStreamContents; + public class TrimTokenFilterTests extends ESTokenStreamTestCase { public void testNormalizer() throws IOException { diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/WordDelimiterGraphTokenFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/WordDelimiterGraphTokenFilterFactoryTests.java index 4a060ab11e2bd..4995fe844c9c5 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/WordDelimiterGraphTokenFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/WordDelimiterGraphTokenFilterFactoryTests.java @@ -30,6 +30,9 @@ import java.io.StringReader; import java.util.Collections; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertAnalyzesTo; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertTokenStreamContents; + public class WordDelimiterGraphTokenFilterFactoryTests extends BaseWordDelimiterTokenFilterFactoryTestCase { public WordDelimiterGraphTokenFilterFactoryTests() { super("word_delimiter_graph"); diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/WordDelimiterTokenFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/WordDelimiterTokenFilterFactoryTests.java index 2644303991b8d..636174f5c79cc 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/WordDelimiterTokenFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/WordDelimiterTokenFilterFactoryTests.java @@ -19,6 +19,8 @@ import java.io.IOException; import java.io.StringReader; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertTokenStreamContents; + public class WordDelimiterTokenFilterFactoryTests extends BaseWordDelimiterTokenFilterFactoryTestCase { public WordDelimiterTokenFilterFactoryTests() { super("word_delimiter"); diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LogsDataStreamRestIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LogsDataStreamRestIT.java index f95815d1daff9..4b9b29d7187e1 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LogsDataStreamRestIT.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LogsDataStreamRestIT.java @@ -73,7 +73,7 @@ private static void waitForLogs(RestClient client) throws Exception { }); } - private static final String LOGS_TEMPLATE = """ 
+ static final String LOGS_TEMPLATE = """ { "index_patterns": [ "logs-*-*" ], "data_stream": {}, @@ -110,7 +110,7 @@ private static void waitForLogs(RestClient client) throws Exception { } }"""; - private static final String LOGS_STANDARD_INDEX_MODE = """ + static final String LOGS_STANDARD_INDEX_MODE = """ { "index_patterns": [ "logs-*-*" ], "data_stream": {}, @@ -143,7 +143,7 @@ private static void waitForLogs(RestClient client) throws Exception { } }"""; - private static final String STANDARD_TEMPLATE = """ + static final String STANDARD_TEMPLATE = """ { "index_patterns": [ "standard-*-*" ], "data_stream": {}, @@ -216,7 +216,7 @@ private static void waitForLogs(RestClient client) throws Exception { } }"""; - private static final String DOC_TEMPLATE = """ + static final String DOC_TEMPLATE = """ { "@timestamp": "%s", "host.name": "%s", @@ -333,6 +333,23 @@ public void testLogsTimeSeriesIndexModeSwitch() throws IOException { ); assertDataStreamBackingIndexMode("logsdb", 0, DATA_STREAM_NAME); + putTemplate(client, "custom-template", LOGS_STANDARD_INDEX_MODE); + rolloverDataStream(client, DATA_STREAM_NAME); + indexDocument( + client, + DATA_STREAM_NAME, + document( + Instant.now(), + randomAlphaOfLength(10), + randomNonNegativeLong(), + randomFrom("PUT", "POST", "GET"), + randomAlphaOfLength(64), + randomIp(randomBoolean()), + randomLongBetween(1_000_000L, 2_000_000L) + ) + ); + assertDataStreamBackingIndexMode("standard", 1, DATA_STREAM_NAME); + putTemplate(client, "custom-template", TIME_SERIES_TEMPLATE); rolloverDataStream(client, DATA_STREAM_NAME); indexDocument( @@ -348,7 +365,24 @@ public void testLogsTimeSeriesIndexModeSwitch() throws IOException { randomLongBetween(1_000_000L, 2_000_000L) ) ); - assertDataStreamBackingIndexMode("time_series", 1, DATA_STREAM_NAME); + assertDataStreamBackingIndexMode("time_series", 2, DATA_STREAM_NAME); + + putTemplate(client, "custom-template", LOGS_STANDARD_INDEX_MODE); + rolloverDataStream(client, DATA_STREAM_NAME); + indexDocument( + client, + DATA_STREAM_NAME, + document( + Instant.now(), + randomAlphaOfLength(10), + randomNonNegativeLong(), + randomFrom("PUT", "POST", "GET"), + randomAlphaOfLength(64), + randomIp(randomBoolean()), + randomLongBetween(1_000_000L, 2_000_000L) + ) + ); + assertDataStreamBackingIndexMode("standard", 3, DATA_STREAM_NAME); putTemplate(client, "custom-template", LOGS_TEMPLATE); rolloverDataStream(client, DATA_STREAM_NAME); @@ -365,7 +399,7 @@ public void testLogsTimeSeriesIndexModeSwitch() throws IOException { randomLongBetween(1_000_000L, 2_000_000L) ) ); - assertDataStreamBackingIndexMode("logsdb", 2, DATA_STREAM_NAME); + assertDataStreamBackingIndexMode("logsdb", 4, DATA_STREAM_NAME); } public void testLogsDBToStandardReindex() throws IOException { @@ -554,7 +588,7 @@ private void assertDataStreamBackingIndexMode(final String indexMode, int backin assertThat(getSettings(client, getWriteBackingIndex(client, dataStreamName, backingIndex)).get("index.mode"), is(indexMode)); } - private String document( + static String document( final Instant timestamp, final String hostname, long pid, @@ -581,13 +615,13 @@ private static void createDataStream(final RestClient client, final String dataS assertOK(client.performRequest(request)); } - private static void putTemplate(final RestClient client, final String templateName, final String mappings) throws IOException { + static void putTemplate(final RestClient client, final String templateName, final String mappings) throws IOException { final Request request = new Request("PUT", 
"/_index_template/" + templateName); request.setJsonEntity(mappings); assertOK(client.performRequest(request)); } - private static void indexDocument(final RestClient client, String indexOrtDataStream, String doc) throws IOException { + static void indexDocument(final RestClient client, String indexOrtDataStream, String doc) throws IOException { final Request request = new Request("POST", "/" + indexOrtDataStream + "/_doc?refresh=true"); request.setJsonEntity(doc); final Response response = client.performRequest(request); diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/MultiClustersIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/MultiClustersIT.java new file mode 100644 index 0000000000000..2f80a230d937a --- /dev/null +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/MultiClustersIT.java @@ -0,0 +1,363 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.datastreams; + +import org.apache.http.HttpHost; +import org.apache.lucene.tests.util.English; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.Randomness; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xcontent.json.JsonXContent; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Comparator; +import java.util.List; +import java.util.Map; +import java.util.function.Predicate; +import java.util.stream.Stream; + +import static org.elasticsearch.datastreams.AbstractDataStreamIT.createDataStream; +import static org.elasticsearch.datastreams.LogsDataStreamRestIT.LOGS_TEMPLATE; +import static org.elasticsearch.datastreams.LogsDataStreamRestIT.STANDARD_TEMPLATE; +import static org.elasticsearch.datastreams.LogsDataStreamRestIT.indexDocument; +import static org.elasticsearch.datastreams.LogsDataStreamRestIT.putTemplate; +import static org.elasticsearch.index.mapper.DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; + +public class MultiClustersIT extends ESRestTestCase { + static List localLogsDocs = null; + static List remoteLogsDocs = null; + static List localStandardDocs = null; + static List remoteStandardDocs = null; + + public static ElasticsearchCluster remoteCluster = ElasticsearchCluster.local() + .name("remote_cluster") + .distribution(DistributionType.DEFAULT) + .module("data-streams") + .module("x-pack-stack") + .setting("xpack.security.enabled", 
"false") + .setting("xpack.license.self_generated.type", "trial") + .setting("cluster.logsdb.enabled", "true") + .build(); + + public static ElasticsearchCluster localCluster = ElasticsearchCluster.local() + .name("local_cluster") + .distribution(DistributionType.DEFAULT) + .module("data-streams") + .module("x-pack-stack") + .setting("xpack.security.enabled", "false") + .setting("xpack.license.self_generated.type", "trial") + .setting("cluster.logsdb.enabled", "true") + .setting("node.roles", "[data,ingest,master,remote_cluster_client]") + .setting("cluster.remote.remote_cluster.seeds", () -> "\"" + remoteCluster.getTransportEndpoint(0) + "\"") + .setting("cluster.remote.connections_per_cluster", "1") + .setting("cluster.remote.remote_cluster.skip_unavailable", "false") + .build(); + + @ClassRule + public static TestRule clusterRule = RuleChain.outerRule(remoteCluster).around(localCluster); + + private RestClient localClusterClient() throws IOException { + var clusterHosts = parseClusterHosts(localCluster.getHttpAddresses()); + return buildClient(restClientSettings(), clusterHosts.toArray(new HttpHost[0])); + } + + private RestClient remoteClusterClient() throws IOException { + var clusterHosts = parseClusterHosts(remoteCluster.getHttpAddresses()); + return buildClient(restClientSettings(), clusterHosts.toArray(new HttpHost[0])); + } + + private record Document(long timestamp, String cluster, String hostName, long pid, String method, long messageId, String message) { + + @SuppressWarnings("unchecked") + static Document fromHit(Map hit) { + long timestamp = DEFAULT_DATE_TIME_FORMATTER.parseMillis(hit.get("@timestamp").toString()); + String cluster = (String) hit.get("cluster"); + String hostName = (String) hit.get("host.name"); + if (hostName == null) { + Map host = (Map) hit.get("host"); + hostName = (String) host.get("name"); + } + long pid = ((Number) hit.get("pid")).longValue(); + String method = (String) hit.get("method"); + long messageId = ((Number) hit.get("message_id")).longValue(); + String message = (String) hit.get("message"); + return new Document(timestamp, cluster, hostName, pid, method, messageId, message); + } + + String toJson() throws IOException { + XContentBuilder builder = JsonXContent.contentBuilder() + .startObject() + .field("@timestamp", timestamp) + .field("cluster", cluster) + .field("host.name", hostName) + .field("pid", pid) + .field("method", method) + .field("message_id", messageId) + .field("message", message) + .endObject(); + return Strings.toString(builder); + } + } + + static String randomHostName() { + return randomFrom("qa-", "staging-", "prod-") + between(1, 3); + } + + static List indexDocuments(RestClient client, String cluster, String index, int startMessageId) throws IOException { + int numDocs = between(0, 100); + List docs = new ArrayList<>(numDocs); + long timestamp = DEFAULT_DATE_TIME_FORMATTER.parseMillis("2024-09-15T00:00:00Z"); + for (int i = 0; i < numDocs; i++) { + timestamp += between(0, 5) * 1000L; + long pid = randomLongBetween(1, 10); + String method = randomFrom("GET", "PUT", "POST", "DELETE"); + String message = English.intToEnglish(between(1, 1000000)); + docs.add(new Document(timestamp, cluster, randomHostName(), pid, method, startMessageId + i, message)); + } + Randomness.shuffle(docs); + for (Document doc : docs) { + indexDocument(client, index, doc.toJson()); + if (rarely()) { + refresh(client, index); + } + } + refresh(client, index); + return docs; + } + + @Before + public void setUpIndices() throws Exception { + if 
(localLogsDocs != null) { + return; + } + try (RestClient client = localClusterClient()) { + putTemplate(client, "logs-template", LOGS_TEMPLATE); + putTemplate(client, "standard-template", STANDARD_TEMPLATE); + + createDataStream(client, "logs-apache-kafka"); + localLogsDocs = indexDocuments(client, "local", "logs-apache-kafka", 0); + assertDocCount(client, "logs-apache-kafka", localLogsDocs.size()); + + createDataStream(client, "standard-apache-kafka"); + localStandardDocs = indexDocuments(client, "local", "standard-apache-kafka", 1000); + assertDocCount(client, "standard-apache-kafka", localStandardDocs.size()); + } + try (RestClient client = remoteClusterClient()) { + putTemplate(client, "logs-template", LOGS_TEMPLATE); + putTemplate(client, "standard-template", STANDARD_TEMPLATE); + + createDataStream(client, "logs-apache-kafka"); + remoteLogsDocs = indexDocuments(client, "remote", "logs-apache-kafka", 2000); + assertDocCount(client, "logs-apache-kafka", remoteLogsDocs.size()); + + createDataStream(client, "standard-apache-kafka"); + remoteStandardDocs = indexDocuments(client, "remote", "standard-apache-kafka", 3000); + assertDocCount(client, "standard-apache-kafka", remoteStandardDocs.size()); + } + } + + public void testSource() throws IOException { + XContentBuilder searchSource = JsonXContent.contentBuilder().startObject().field("_source", true).field("size", 500); + final boolean sorted = randomBoolean(); + if (sorted) { + searchSource.startArray("sort"); + searchSource.value("message_id"); + searchSource.endArray(); + } + final Predicate<String> filterHost; + if (randomBoolean()) { + String host = randomHostName(); + filterHost = s -> s.equals(host); + searchSource.startObject("query"); + searchSource.startObject("term"); + searchSource.startObject("host.name"); + searchSource.field("value", host); + searchSource.endObject(); + searchSource.endObject(); + searchSource.endObject(); + } else { + filterHost = s -> true; + } + searchSource.endObject(); + // remote only + { + var request = new Request("POST", "/*:l*,*:s*/_search"); + request.setJsonEntity(Strings.toString(searchSource)); + if (randomBoolean()) { + request.addParameter("ccs_minimize_roundtrips", Boolean.toString(randomBoolean())); + } + Response resp = client().performRequest(request); + assertOK(resp); + Stream<Document> hits = extractHits(resp).stream().map(Document::fromHit); + if (sorted == false) { + hits = hits.sorted(Comparator.comparingLong(Document::messageId)); + } + var expectedHits = Stream.of(remoteLogsDocs, remoteStandardDocs) + .flatMap(Collection::stream) + .filter(d -> filterHost.test(d.hostName)) + .sorted(Comparator.comparingLong(Document::messageId)) + .toList(); + assertThat(hits.toList(), equalTo(expectedHits)); + } + // both clusters + { + var request = new Request("POST", "/*,*:*/_search"); + request.setJsonEntity(Strings.toString(searchSource)); + if (randomBoolean()) { + request.addParameter("ccs_minimize_roundtrips", Boolean.toString(randomBoolean())); + } + Response resp = client().performRequest(request); + assertOK(resp); + Stream<Document> hits = extractHits(resp).stream().map(Document::fromHit); + if (sorted == false) { + hits = hits.sorted(Comparator.comparingLong(Document::messageId)); + } + var expectedHits = Stream.of(localLogsDocs, localStandardDocs, remoteLogsDocs, remoteStandardDocs) + .flatMap(Collection::stream) + .filter(d -> filterHost.test(d.hostName)) + .sorted(Comparator.comparingLong(Document::messageId)) + .toList(); + assertThat(hits.toList(), equalTo(expectedHits)); + } + + } + + public void
testFields() throws IOException { + XContentBuilder searchSource = JsonXContent.contentBuilder() + .startObject() + .array("fields", "message_id", "host.name") + .field("size", 500); + final boolean sorted = randomBoolean(); + if (sorted) { + searchSource.startArray("sort"); + searchSource.value("message_id"); + searchSource.endArray(); + } + final Predicate<String> filterHost; + if (randomBoolean()) { + String host = randomHostName(); + filterHost = s -> s.equals(host); + searchSource.startObject("query"); + searchSource.startObject("term"); + searchSource.startObject("host.name"); + searchSource.field("value", host); + searchSource.endObject(); + searchSource.endObject(); + searchSource.endObject(); + } else { + filterHost = s -> true; + } + searchSource.endObject(); + record Fields(long messageId, String hostName) { + @SuppressWarnings("unchecked") + static Fields fromResponse(Map<String, Object> hit) { + List<String> hostName = (List<String>) hit.get("host.name"); + assertThat(hostName, hasSize(1)); + List<Number> messageId = (List<Number>) hit.get("message_id"); + assertThat(messageId, hasSize(1)); + return new Fields(messageId.getFirst().longValue(), hostName.getFirst()); + } + } + // remote only + { + var request = new Request("POST", "/*:l*,*:s*/_search"); + request.setJsonEntity(Strings.toString(searchSource)); + if (randomBoolean()) { + request.addParameter("ccs_minimize_roundtrips", Boolean.toString(randomBoolean())); + } + Response resp = client().performRequest(request); + assertOK(resp); + Stream<Fields> hits = extractFields(resp).stream().map(Fields::fromResponse); + if (sorted == false) { + hits = hits.sorted(Comparator.comparingLong(Fields::messageId)); + } + var expectedHits = Stream.of(remoteLogsDocs, remoteStandardDocs) + .flatMap(Collection::stream) + .filter(d -> filterHost.test(d.hostName)) + .map(d -> new Fields(d.messageId, d.hostName)) + .sorted(Comparator.comparingLong(Fields::messageId)) + .toList(); + assertThat(hits.toList(), equalTo(expectedHits)); + } + // both clusters + { + var request = new Request("POST", "/*,*:*/_search"); + request.setJsonEntity(Strings.toString(searchSource)); + if (randomBoolean()) { + request.addParameter("ccs_minimize_roundtrips", Boolean.toString(randomBoolean())); + } + Response resp = client().performRequest(request); + assertOK(resp); + Stream<Fields> hits = extractFields(resp).stream().map(Fields::fromResponse); + if (sorted == false) { + hits = hits.sorted(Comparator.comparingLong(Fields::messageId)); + } + var expectedHits = Stream.of(localLogsDocs, localStandardDocs, remoteLogsDocs, remoteStandardDocs) + .flatMap(Collection::stream) + .filter(d -> filterHost.test(d.hostName)) + .map(d -> new Fields(d.messageId, d.hostName)) + .sorted(Comparator.comparingLong(Fields::messageId)) + .toList(); + assertThat(hits.toList(), equalTo(expectedHits)); + } + } + + @SuppressWarnings("unchecked") + private static List<Map<String, Object>> extractHits(final Response response) throws IOException { + final Map<String, Object> map = XContentHelper.convertToMap(XContentType.JSON.xContent(), response.getEntity().getContent(), true); + final Map<String, Object> hitsMap = (Map<String, Object>) map.get("hits"); + final List<Map<String, Object>> hitsList = (List<Map<String, Object>>) hitsMap.get("hits"); + return hitsList.stream().map(hit -> (Map<String, Object>) hit.get("_source")).toList(); + } + + @SuppressWarnings("unchecked") + private static List<Map<String, Object>> extractFields(final Response response) throws IOException { + final Map<String, Object> map = XContentHelper.convertToMap(XContentType.JSON.xContent(), response.getEntity().getContent(), true); + final Map<String, Object> hitsMap = (Map<String, Object>) map.get("hits"); + final List<Map<String, Object>> hitsList = (List<Map<String, Object>>) hitsMap.get("hits"); + return
hitsList.stream().map(hit -> (Map<String, Object>) hit.get("fields")).toList(); + } + + @Override + protected String getTestRestCluster() { + return localCluster.getHttpAddresses(); + } + + @Override + protected boolean preserveIndicesUponCompletion() { + return true; + } + + @Override + protected boolean preserveClusterUponCompletion() { + return true; + } + + @Override + protected boolean preserveDataStreamsUponCompletion() { + return true; + } +} diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/LogsIndexModeDisabledRestTestIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/LogsIndexModeDisabledRestTestIT.java index c9818a34169de..123ca3b806153 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/LogsIndexModeDisabledRestTestIT.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/LogsIndexModeDisabledRestTestIT.java @@ -11,6 +11,7 @@ import org.elasticsearch.client.RestClient; import org.elasticsearch.index.IndexMode; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.hamcrest.Matchers; @@ -23,6 +24,22 @@ public class LogsIndexModeDisabledRestTestIT extends LogsIndexModeRestTestIT { + private static final String MAPPINGS = """ + { + "template": { + "mappings": { + "properties": { + "@timestamp": { + "type": "date" + }, + "message": { + "type": "text" + } + } + } + } + }"""; + @ClassRule() public static ElasticsearchCluster cluster = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) @@ -50,8 +67,59 @@ public void setup() throws Exception { public void testLogsSettingsIndexModeDisabled() throws IOException { assertOK(createDataStream(client, "logs-custom-dev")); - final String indexMode = (String) getSetting(client, getDataStreamBackingIndex(client, "logs-custom-dev", 0), "index.mode"); + final String indexMode = (String) getSetting( + client, + getDataStreamBackingIndex(client, "logs-custom-dev", 0), + IndexSettings.MODE.getKey() + ); assertThat(indexMode, Matchers.not(equalTo(IndexMode.LOGSDB.getName()))); } + public void testTogglingLogsdb() throws IOException { + putComponentTemplate(client, "logs@settings", MAPPINGS); + assertOK(createDataStream(client, "logs-custom-dev")); + final String indexModeBefore = (String) getSetting( + client, + getDataStreamBackingIndex(client, "logs-custom-dev", 0), + IndexSettings.MODE.getKey() + ); + assertThat(indexModeBefore, Matchers.not(equalTo(IndexMode.LOGSDB.getName()))); + assertOK(putClusterSetting(client, "cluster.logsdb.enabled", "true")); + final String indexModeAfter = (String) getSetting( + client, + getDataStreamBackingIndex(client, "logs-custom-dev", 0), + IndexSettings.MODE.getKey() + ); + assertThat(indexModeAfter, Matchers.not(equalTo(IndexMode.LOGSDB.getName()))); + assertOK(rolloverDataStream(client, "logs-custom-dev")); + final String indexModeLater = (String) getSetting( + client, + getDataStreamBackingIndex(client, "logs-custom-dev", 1), + IndexSettings.MODE.getKey() + ); + assertThat(indexModeLater, equalTo(IndexMode.LOGSDB.getName())); + assertOK(putClusterSetting(client, "cluster.logsdb.enabled", "false")); + assertOK(rolloverDataStream(client, "logs-custom-dev")); + final String indexModeFinal = (String) getSetting( + client, + getDataStreamBackingIndex(client, "logs-custom-dev", 2), + IndexSettings.MODE.getKey() + ); + 
assertThat(indexModeFinal, Matchers.not(equalTo(IndexMode.LOGSDB.getName()))); + + } + + public void testEnablingLogsdb() throws IOException { + putComponentTemplate(client, "logs@settings", MAPPINGS); + assertOK(putClusterSetting(client, "cluster.logsdb.enabled", true)); + assertOK(createDataStream(client, "logs-custom-dev")); + final String indexMode = (String) getSetting( + client, + getDataStreamBackingIndex(client, "logs-custom-dev", 0), + IndexSettings.MODE.getKey() + ); + assertThat(indexMode, equalTo(IndexMode.LOGSDB.getName())); + assertOK(putClusterSetting(client, "cluster.logsdb.enabled", false)); + } + } diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/LogsIndexModeEnabledRestTestIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/LogsIndexModeEnabledRestTestIT.java index d7bdf54007d69..a024a2c0f303c 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/LogsIndexModeEnabledRestTestIT.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/LogsIndexModeEnabledRestTestIT.java @@ -10,8 +10,10 @@ package org.elasticsearch.datastreams.logsdb; import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; import org.elasticsearch.index.IndexMode; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.hamcrest.Matchers; @@ -179,7 +181,11 @@ public void setup() throws Exception { public void testCreateDataStream() throws IOException { assertOK(putComponentTemplate(client, "logs@custom", MAPPINGS)); assertOK(createDataStream(client, "logs-custom-dev")); - final String indexMode = (String) getSetting(client, getDataStreamBackingIndex(client, "logs-custom-dev", 0), "index.mode"); + final String indexMode = (String) getSetting( + client, + getDataStreamBackingIndex(client, "logs-custom-dev", 0), + IndexSettings.MODE.getKey() + ); assertThat(indexMode, equalTo(IndexMode.LOGSDB.getName())); } @@ -224,4 +230,83 @@ public void testRolloverDataStream() throws IOException { assertThat(firstBackingIndex, Matchers.not(equalTo(secondBackingIndex))); assertThat(getDataStreamBackingIndices(client, "logs-custom-dev").size(), equalTo(2)); } + + public void testLogsAtSettingWithStandardOverride() throws IOException { + assertOK(putComponentTemplate(client, "logs@custom", """ + { + "template": { + "settings": { + "index": { + "mode": "standard" + } + } + } + } + """)); + assertOK(createDataStream(client, "logs-custom-dev")); + final String indexMode = (String) getSetting( + client, + getDataStreamBackingIndex(client, "logs-custom-dev", 0), + IndexSettings.MODE.getKey() + ); + assertThat(indexMode, equalTo(IndexMode.STANDARD.getName())); + } + + public void testLogsAtSettingWithTimeSeriesOverride() throws IOException { + assertOK(putComponentTemplate(client, "logs@custom", """ + { + "template": { + "settings": { + "index": { + "routing_path": [ "hostname" ], + "mode": "time_series", + "sort.field": [], + "sort.order": [] + } + }, + "mappings": { + "properties": { + "hostname": { + "type": "keyword", + "time_series_dimension": true + } + } + } + } + } + """)); + assertOK(createDataStream(client, "logs-custom-dev")); + final String indexMode = (String) getSetting( + client, + getDataStreamBackingIndex(client, "logs-custom-dev", 0), + 
IndexSettings.MODE.getKey() + ); + assertThat(indexMode, equalTo(IndexMode.TIME_SERIES.getName())); + } + + public void testLogsAtSettingWithTimeSeriesOverrideFailure() { + // NOTE: apm@settings defines sorting on @timestamp and template composition results in index.mode "time_series" + // with a non-allowed index.sort.field '@timestamp'. This fails at template composition stage before the index is even created. + final ResponseException ex = assertThrows(ResponseException.class, () -> putComponentTemplate(client, "logs@custom", """ + { + "template": { + "settings": { + "index": { + "routing_path": [ "hostname" ], + "mode": "time_series" + } + }, + "mappings": { + "properties": { + "hostname": { + "type": "keyword", + "time_series_dimension": true + } + } + } + } + } + """)); + assertTrue(ex.getMessage().contains("[index.mode=time_series] is incompatible with [index.sort.field]")); + } } diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/LogsIndexModeRestTestIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/LogsIndexModeRestTestIT.java index 7d65207794598..22ac2b6d7d239 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/LogsIndexModeRestTestIT.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/LogsIndexModeRestTestIT.java @@ -33,10 +33,16 @@ protected static void waitForLogs(RestClient client) throws Exception { }); } - protected static Response putComponentTemplate(final RestClient client, final String templateName, final String mappings) + protected static Response putComponentTemplate(final RestClient client, final String componentTemplate, final String contends) throws IOException { - final Request request = new Request("PUT", "/_component_template/" + templateName); - request.setJsonEntity(mappings); + final Request request = new Request("PUT", "/_component_template/" + componentTemplate); + request.setJsonEntity(contends); + return client.performRequest(request); + } + + protected static Response putTemplate(final RestClient client, final String template, final String contents) throws IOException { + final Request request = new Request("PUT", "/_index_template/" + template); + request.setJsonEntity(contents); return client.performRequest(request); } @@ -87,4 +93,11 @@ protected static Response bulkIndex(final RestClient client, final String dataSt bulkRequest.addParameter("refresh", "true"); return client.performRequest(bulkRequest); } + + protected static Response putClusterSetting(final RestClient client, final String settingName, final Object settingValue) + throws IOException { + final Request request = new Request("PUT", "/_cluster/settings"); + request.setJsonEntity("{ \"transient\": { \"" + settingName + "\": " + settingValue + " } }"); + return client.performRequest(request); + } } diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/DataGenerationHelper.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/DataGenerationHelper.java index 7fd1ccde10053..90e6855c58e1a 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/DataGenerationHelper.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/DataGenerationHelper.java @@ -27,27 +27,28 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.function.Consumer; -class 
DataGenerationHelper { +public class DataGenerationHelper { private final ObjectMapper.Subobjects subobjects; private final boolean keepArraySource; private final DataGenerator dataGenerator; - DataGenerationHelper() { - // TODO enable subobjects: auto - // It is disabled because it currently does not have auto flattening and that results in asserts being triggered when using copy_to. - this.subobjects = ESTestCase.randomValueOtherThan( - ObjectMapper.Subobjects.AUTO, - () -> ESTestCase.randomFrom(ObjectMapper.Subobjects.values()) - ); + public DataGenerationHelper() { + this(b -> {}); + } + + public DataGenerationHelper(Consumer<DataGeneratorSpecification.Builder> builderConfigurator) { + this.subobjects = ESTestCase.randomFrom(ObjectMapper.Subobjects.values()); this.keepArraySource = ESTestCase.randomBoolean(); var specificationBuilder = DataGeneratorSpecification.builder().withFullyDynamicMapping(ESTestCase.randomBoolean()); if (subobjects != ObjectMapper.Subobjects.ENABLED) { specificationBuilder = specificationBuilder.withNestedFieldsLimit(0); } - this.dataGenerator = new DataGenerator(specificationBuilder.withDataSourceHandlers(List.of(new DataSourceHandler() { + + specificationBuilder.withDataSourceHandlers(List.of(new DataSourceHandler() { @Override public DataSourceResponse.ObjectMappingParametersGenerator handle(DataSourceRequest.ObjectMappingParametersGenerator request) { if (subobjects == ObjectMapper.Subobjects.ENABLED) { @@ -113,8 +114,12 @@ public CheckedConsumer<XContentBuilder, IOException> fieldValueGenerator() { } }) ) - ) - .build()); + ); + + // Customize builder if necessary + builderConfigurator.accept(specificationBuilder); + + this.dataGenerator = new DataGenerator(specificationBuilder.build()); } DataGenerator getDataGenerator() { diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusLogsIndexModeRandomDataChallengeRestIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusLogsIndexModeRandomDataChallengeRestIT.java index 611f7fc5a9dcd..751336cc1f646 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusLogsIndexModeRandomDataChallengeRestIT.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusLogsIndexModeRandomDataChallengeRestIT.java @@ -26,8 +26,12 @@ public class StandardVersusLogsIndexModeRandomDataChallengeRestIT extends Standa protected final DataGenerationHelper dataGenerationHelper; public StandardVersusLogsIndexModeRandomDataChallengeRestIT() { + this(new DataGenerationHelper()); + } + + protected StandardVersusLogsIndexModeRandomDataChallengeRestIT(DataGenerationHelper dataGenerationHelper) { super(); - dataGenerationHelper = new DataGenerationHelper(); + this.dataGenerationHelper = dataGenerationHelper; } @Override diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusLogsIndexModeRandomDataDynamicMappingChallengeRestIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusLogsIndexModeRandomDataDynamicMappingChallengeRestIT.java new file mode 100644 index 0000000000000..6b0e4d4d0b34d --- /dev/null +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/logsdb/qa/StandardVersusLogsIndexModeRandomDataDynamicMappingChallengeRestIT.java @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V.
under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.datastreams.logsdb.qa; + +import org.elasticsearch.common.settings.Settings; + +public class StandardVersusLogsIndexModeRandomDataDynamicMappingChallengeRestIT extends + StandardVersusLogsIndexModeRandomDataChallengeRestIT { + public StandardVersusLogsIndexModeRandomDataDynamicMappingChallengeRestIT() { + super(new DataGenerationHelper(builder -> builder.withFullyDynamicMapping(true))); + } + + @Override + public void contenderSettings(Settings.Builder builder) { + super.contenderSettings(builder); + // ignore_dynamic_beyond_limit is set in the template so it's always true + builder.put("index.mapping.total_fields.limit", randomIntBetween(1, 5000)); + } +} diff --git a/modules/dot-prefix-validation/src/yamlRestTest/resources/rest-api-spec/test/dot_prefix/10_basic.yml b/modules/dot-prefix-validation/src/yamlRestTest/resources/rest-api-spec/test/dot_prefix/10_basic.yml index b160af4ee8290..ae256daeb8abb 100644 --- a/modules/dot-prefix-validation/src/yamlRestTest/resources/rest-api-spec/test/dot_prefix/10_basic.yml +++ b/modules/dot-prefix-validation/src/yamlRestTest/resources/rest-api-spec/test/dot_prefix/10_basic.yml @@ -164,11 +164,13 @@ teardown: --- "Deprecated index template with a dot prefix index pattern": - requires: - test_runner_features: ["warnings", "headers"] + test_runner_features: ["warnings", "headers", "allowed_warnings"] - do: warnings: - "Index [.data-*] name begins with a dot (.), which is deprecated, and will not be allowed in a future Elasticsearch version." 
+ allowed_warnings: + - "index template [my-template] has index patterns [regular, .data-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-template] will take precedence during new index creation" indices.put_index_template: name: my-template body: @@ -177,7 +179,8 @@ teardown: - do: headers: { X-elastic-product-origin: kibana } - warnings: + allowed_warnings: + - "index template [my-template2] has index patterns [other, .data2-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-template2] will take precedence during new index creation" indices.put_index_template: name: my-template2 body: diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/Database.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/Database.java index 6bf38fbd34cfe..dccda0d58cfbf 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/Database.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/Database.java @@ -9,6 +9,7 @@ package org.elasticsearch.ingest.geoip; +import org.elasticsearch.common.Strings; import org.elasticsearch.core.Nullable; import java.util.Arrays; @@ -150,6 +151,29 @@ enum Database { private static final String ENTERPRISE_DB_SUFFIX = "-Enterprise"; private static final String ISP_DB_SUFFIX = "-ISP"; + @Nullable + private static Database getMaxmindDatabase(final String databaseType) { + if (databaseType.endsWith(Database.CITY_DB_SUFFIX)) { + return Database.City; + } else if (databaseType.endsWith(Database.COUNTRY_DB_SUFFIX)) { + return Database.Country; + } else if (databaseType.endsWith(Database.ASN_DB_SUFFIX)) { + return Database.Asn; + } else if (databaseType.endsWith(Database.ANONYMOUS_IP_DB_SUFFIX)) { + return Database.AnonymousIp; + } else if (databaseType.endsWith(Database.CONNECTION_TYPE_DB_SUFFIX)) { + return Database.ConnectionType; + } else if (databaseType.endsWith(Database.DOMAIN_DB_SUFFIX)) { + return Database.Domain; + } else if (databaseType.endsWith(Database.ENTERPRISE_DB_SUFFIX)) { + return Database.Enterprise; + } else if (databaseType.endsWith(Database.ISP_DB_SUFFIX)) { + return Database.Isp; + } else { + return null; // no match was found + } + } + /** * Parses the passed-in databaseType (presumably from the passed-in databaseFile) and return the Database instance that is * associated with that databaseType. 
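With the suffix checks extracted into getMaxmindDatabase above, resolution depends only on the trailing portion of the Maxmind database_type string, so the GeoLite2 and GeoIP2 editions of a database map to the same constant. A rough usage sketch, with type strings following the Maxmind naming convention (the fallback for unmatched types is handled in getDatabase, outside this hunk):

// Suffix-based dispatch: only the trailing "-City", "-Country", "-ASN", ... matters.
Database city = Database.getDatabase("GeoLite2-City", "GeoLite2-City.mmdb"); // Database.City
Database sameCity = Database.getDatabase("GeoIP2-City", "GeoIP2-City.mmdb"); // also Database.City
Database asn = Database.getDatabase("GeoLite2-ASN", "GeoLite2-ASN.mmdb"); // Database.Asn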
@@ -161,24 +185,9 @@ enum Database { */ public static Database getDatabase(final String databaseType, final String databaseFile) { Database database = null; - if (databaseType != null) { - if (databaseType.endsWith(Database.CITY_DB_SUFFIX)) { - database = Database.City; - } else if (databaseType.endsWith(Database.COUNTRY_DB_SUFFIX)) { - database = Database.Country; - } else if (databaseType.endsWith(Database.ASN_DB_SUFFIX)) { - database = Database.Asn; - } else if (databaseType.endsWith(Database.ANONYMOUS_IP_DB_SUFFIX)) { - database = Database.AnonymousIp; - } else if (databaseType.endsWith(Database.CONNECTION_TYPE_DB_SUFFIX)) { - database = Database.ConnectionType; - } else if (databaseType.endsWith(Database.DOMAIN_DB_SUFFIX)) { - database = Database.Domain; - } else if (databaseType.endsWith(Database.ENTERPRISE_DB_SUFFIX)) { - database = Database.Enterprise; - } else if (databaseType.endsWith(Database.ISP_DB_SUFFIX)) { - database = Database.Isp; - } + + if (Strings.hasText(databaseType)) { + database = getMaxmindDatabase(databaseType); } if (database == null) { diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java index 3e4f2be5be8d5..ce160b060ae4c 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java @@ -106,14 +106,14 @@ boolean isIgnoreMissing() { } @Override - public IngestDocument execute(IngestDocument ingestDocument) throws IOException { - Object ip = ingestDocument.getFieldValue(field, Object.class, ignoreMissing); + public IngestDocument execute(IngestDocument document) throws IOException { + Object ip = document.getFieldValue(field, Object.class, ignoreMissing); if (isValid.get() == false) { - ingestDocument.appendFieldValue("tags", "_geoip_expired_database", false); - return ingestDocument; + document.appendFieldValue("tags", "_geoip_expired_database", false); + return document; } else if (ip == null && ignoreMissing) { - return ingestDocument; + return document; } else if (ip == null) { throw new IllegalArgumentException("field [" + field + "] is null, cannot extract geoip information."); } @@ -121,44 +121,44 @@ public IngestDocument execute(IngestDocument ingestDocument) throws IOException try (IpDatabase ipDatabase = this.supplier.get()) { if (ipDatabase == null) { if (ignoreMissing == false) { - tag(ingestDocument, databaseFile); + tag(document, databaseFile); } - return ingestDocument; + return document; } if (ip instanceof String ipString) { - Map<String, Object> geoData = getGeoData(ipDatabase, ipString); - if (geoData.isEmpty() == false) { - ingestDocument.setFieldValue(targetField, geoData); + Map<String, Object> data = getGeoData(ipDatabase, ipString); + if (data.isEmpty() == false) { + document.setFieldValue(targetField, data); } } else if (ip instanceof List<?> ipList) { boolean match = false; - List<Map<String, Object>> geoDataList = new ArrayList<>(ipList.size()); + List<Map<String, Object>> dataList = new ArrayList<>(ipList.size()); for (Object ipAddr : ipList) { if (ipAddr instanceof String == false) { throw new IllegalArgumentException("array in field [" + field + "] should only contain strings"); } - Map<String, Object> geoData = getGeoData(ipDatabase, (String) ipAddr); - if (geoData.isEmpty()) { - geoDataList.add(null); + Map<String, Object> data = getGeoData(ipDatabase, (String) ipAddr); + if (data.isEmpty()) { + dataList.add(null); continue; } if (firstOnly) { -
ingestDocument.setFieldValue(targetField, geoData); - return ingestDocument; + document.setFieldValue(targetField, data); + return document; } match = true; - geoDataList.add(geoData); + dataList.add(data); } if (match) { - ingestDocument.setFieldValue(targetField, geoDataList); + document.setFieldValue(targetField, dataList); } } else { throw new IllegalArgumentException("field [" + field + "] should contain only string or array of strings"); } } - return ingestDocument; + return document; } private Map<String, Object> getGeoData(IpDatabase ipDatabase, String ipAddress) throws IOException { diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/TransportGetDatabaseConfigurationAction.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/TransportGetDatabaseConfigurationAction.java index 730ae6d8b8ae5..0660a9ff0491d 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/TransportGetDatabaseConfigurationAction.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/TransportGetDatabaseConfigurationAction.java @@ -74,7 +74,7 @@ protected void doExecute( * TransportGetDatabaseConfigurationAction used to be a TransportMasterNodeAction, and not all nodes in the cluster have been * updated. So we don't want to send node requests to the other nodes because they will blow up. Instead, we just return * the information that we used to return from the master node (it doesn't make any difference that this might not be the master - * node, because we're only reading the clsuter state). + * node, because we're only reading the cluster state). */ newResponseAsync(task, request, createActionContext(task, request), List.of(), List.of(), listener); } else { diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MMDBUtilTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MMDBUtilTests.java index 5473eab35aa2c..d441b749f4225 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MMDBUtilTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MMDBUtilTests.java @@ -66,4 +66,39 @@ public void testSmallFileWithALongDescription() throws IOException { // it was once the case that we couldn't process an mmdb that was smaller than 512 bytes assertThat(Files.size(database), is(444L)); // 444 is <512 } + + public void testDatabaseTypeParsing() throws IOException { + // this test is a little bit overloaded -- it's testing that we're getting the expected sorts of + // database_type strings from these files, *and* it's also testing that we dispatch on those strings + // correctly and associate those files with the correct high-level Elasticsearch Database type. + // down the road it would probably make sense to split these out and find a better home for some of the + // logic, but for now it's probably more valuable to have the test *somewhere* than to get especially + // pedantic about where precisely it should be.
+ + copyDatabase("GeoLite2-City-Test.mmdb", tmpDir); + copyDatabase("GeoLite2-Country-Test.mmdb", tmpDir); + copyDatabase("GeoLite2-ASN-Test.mmdb", tmpDir); + copyDatabase("GeoIP2-Anonymous-IP-Test.mmdb", tmpDir); + copyDatabase("GeoIP2-City-Test.mmdb", tmpDir); + copyDatabase("GeoIP2-Country-Test.mmdb", tmpDir); + copyDatabase("GeoIP2-Connection-Type-Test.mmdb", tmpDir); + copyDatabase("GeoIP2-Domain-Test.mmdb", tmpDir); + copyDatabase("GeoIP2-Enterprise-Test.mmdb", tmpDir); + copyDatabase("GeoIP2-ISP-Test.mmdb", tmpDir); + + assertThat(parseDatabaseFromType("GeoLite2-City-Test.mmdb"), is(Database.City)); + assertThat(parseDatabaseFromType("GeoLite2-Country-Test.mmdb"), is(Database.Country)); + assertThat(parseDatabaseFromType("GeoLite2-ASN-Test.mmdb"), is(Database.Asn)); + assertThat(parseDatabaseFromType("GeoIP2-Anonymous-IP-Test.mmdb"), is(Database.AnonymousIp)); + assertThat(parseDatabaseFromType("GeoIP2-City-Test.mmdb"), is(Database.City)); + assertThat(parseDatabaseFromType("GeoIP2-Country-Test.mmdb"), is(Database.Country)); + assertThat(parseDatabaseFromType("GeoIP2-Connection-Type-Test.mmdb"), is(Database.ConnectionType)); + assertThat(parseDatabaseFromType("GeoIP2-Domain-Test.mmdb"), is(Database.Domain)); + assertThat(parseDatabaseFromType("GeoIP2-Enterprise-Test.mmdb"), is(Database.Enterprise)); + assertThat(parseDatabaseFromType("GeoIP2-ISP-Test.mmdb"), is(Database.Isp)); + } + + private Database parseDatabaseFromType(String databaseFile) throws IOException { + return Database.getDatabase(MMDBUtil.getDatabaseType(tmpDir.resolve(databaseFile)), null); + } } diff --git a/modules/ingest-geoip/src/test/resources/GeoIP2-Country-Test.mmdb b/modules/ingest-geoip/src/test/resources/GeoIP2-Country-Test.mmdb new file mode 100644 index 0000000000000..0b1f6cf50b2a0 Binary files /dev/null and b/modules/ingest-geoip/src/test/resources/GeoIP2-Country-Test.mmdb differ diff --git a/modules/ingest-geoip/src/test/resources/GeoLite2-ASN-Test.mmdb b/modules/ingest-geoip/src/test/resources/GeoLite2-ASN-Test.mmdb new file mode 100644 index 0000000000000..2614d1a7aa235 Binary files /dev/null and b/modules/ingest-geoip/src/test/resources/GeoLite2-ASN-Test.mmdb differ diff --git a/modules/ingest-geoip/src/test/resources/GeoLite2-Country-Test.mmdb b/modules/ingest-geoip/src/test/resources/GeoLite2-Country-Test.mmdb new file mode 100644 index 0000000000000..44b5ff1a3f1be Binary files /dev/null and b/modules/ingest-geoip/src/test/resources/GeoLite2-Country-Test.mmdb differ diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateActionTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateActionTests.java deleted file mode 100644 index 3613d7390fda2..0000000000000 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateActionTests.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ - -package org.elasticsearch.script.mustache; - -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.RestApiVersion; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.test.rest.FakeRestRequest; -import org.elasticsearch.test.rest.RestActionTestCase; -import org.elasticsearch.xcontent.XContentType; -import org.junit.Before; -import org.mockito.Mockito; - -import java.nio.charset.StandardCharsets; -import java.util.Collections; -import java.util.List; -import java.util.Map; - -public final class RestMultiSearchTemplateActionTests extends RestActionTestCase { - final List contentTypeHeader = Collections.singletonList(compatibleMediaType(XContentType.VND_JSON, RestApiVersion.V_7)); - - @Before - public void setUpAction() { - controller().registerHandler(new RestMultiSearchTemplateAction(Settings.EMPTY)); - // todo how to workaround this? we get AssertionError without this - verifyingClient.setExecuteVerifier((actionType, request) -> Mockito.mock(MultiSearchTemplateResponse.class)); - verifyingClient.setExecuteLocallyVerifier((actionType, request) -> Mockito.mock(MultiSearchTemplateResponse.class)); - } - - public void testTypeInPath() { - String content = """ - { "index": "some_index" } - {"source": {"query" : {"match_all" :{}}}} - """; - BytesArray bytesContent = new BytesArray(content.getBytes(StandardCharsets.UTF_8)); - - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withHeaders( - Map.of("Content-Type", contentTypeHeader, "Accept", contentTypeHeader) - ).withMethod(RestRequest.Method.GET).withPath("/some_index/some_type/_msearch/template").withContent(bytesContent, null).build(); - - dispatchRequest(request); - assertCriticalWarnings(RestMultiSearchTemplateAction.TYPES_DEPRECATION_MESSAGE); - } - - public void testTypeInBody() { - String content = """ - { "index": "some_index", "type": "some_type" }\s - {"source": {"query" : {"match_all" :{}}}}\s - """; - BytesArray bytesContent = new BytesArray(content.getBytes(StandardCharsets.UTF_8)); - - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withHeaders( - Map.of("Content-Type", contentTypeHeader, "Accept", contentTypeHeader) - ).withPath("/some_index/_msearch/template").withContent(bytesContent, null).build(); - - dispatchRequest(request); - assertCriticalWarnings(RestMultiSearchTemplateAction.TYPES_DEPRECATION_MESSAGE); - } -} diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestSearchTemplateActionTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestSearchTemplateActionTests.java deleted file mode 100644 index 0216e750c55e0..0000000000000 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestSearchTemplateActionTests.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ - -package org.elasticsearch.script.mustache; - -import org.elasticsearch.core.RestApiVersion; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.action.search.RestSearchAction; -import org.elasticsearch.test.rest.FakeRestRequest; -import org.elasticsearch.test.rest.RestActionTestCase; -import org.junit.Before; - -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import static org.mockito.Mockito.mock; - -public final class RestSearchTemplateActionTests extends RestActionTestCase { - final List<String> contentTypeHeader = Collections.singletonList(randomCompatibleMediaType(RestApiVersion.V_7)); - - @Before - public void setUpAction() { - controller().registerHandler(new RestSearchTemplateAction(nf -> false)); - verifyingClient.setExecuteVerifier((actionType, request) -> mock(SearchTemplateResponse.class)); - verifyingClient.setExecuteLocallyVerifier((actionType, request) -> mock(SearchTemplateResponse.class)); - } - - public void testTypeInPath() { - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withHeaders( - Map.of("Content-Type", contentTypeHeader, "Accept", contentTypeHeader) - ).withMethod(RestRequest.Method.GET).withPath("/some_index/some_type/_search/template").build(); - - dispatchRequest(request); - assertCriticalWarnings(RestSearchAction.TYPES_DEPRECATION_MESSAGE); - } - - public void testTypeParameter() { - Map<String, String> params = new HashMap<>(); - params.put("type", "some_type"); - - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withHeaders( - Map.of("Content-Type", contentTypeHeader, "Accept", contentTypeHeader) - ).withMethod(RestRequest.Method.GET).withPath("/some_index/_search/template").withParams(params).build(); - - dispatchRequest(request); - assertCriticalWarnings(RestSearchAction.TYPES_DEPRECATION_MESSAGE); - } -} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessExecuteAction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessExecuteAction.java index 1950c72c80ec4..4f34cbd3cc475 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessExecuteAction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessExecuteAction.java @@ -108,7 +108,6 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -238,29 +237,21 @@ static Tuple<String, String> parseClusterAliasAndIndex(String indexExpression) { return new Tuple<>(null, null); } String trimmed = indexExpression.trim(); - String sep = String.valueOf(RemoteClusterAware.REMOTE_CLUSTER_INDEX_SEPARATOR); - if (trimmed.startsWith(sep) || trimmed.endsWith(sep)) { - throw new IllegalArgumentException( - "Unable to parse one single valid index name from the provided index: [" + indexExpression + "]" - ); - } - + String[] parts = RemoteClusterAware.splitIndexName(trimmed); // The parser here needs to ensure that the indexExpression is not of the form "remote1:blogs,remote2:blogs" // because (1) only a single index is allowed for Painless Execute and // (2) if this method returns Tuple("remote1", "blogs,remote2:blogs") that will not fail with "index not found".
// Instead, it will fail with the inaccurate and confusing error message: // "Cross-cluster calls are not supported in this context but remote indices were requested: [blogs,remote1:blogs]" // which comes later out of the IndexNameExpressionResolver pathway this code uses. - String[] parts = indexExpression.split(sep, 2); - if (parts.length == 1) { - return new Tuple<>(null, parts[0]); - } else if (parts.length == 2 && parts[1].contains(sep) == false) { - return new Tuple<>(parts[0], parts[1]); - } else { + if ((parts[0] != null && parts[1].isEmpty()) + || parts[1].contains(String.valueOf(RemoteClusterAware.REMOTE_CLUSTER_INDEX_SEPARATOR))) { throw new IllegalArgumentException( "Unable to parse one single valid index name from the provided index: [" + indexExpression + "]" ); } + + return new Tuple<>(parts[0], parts[1]); } public String getClusterAlias() { @@ -556,8 +547,8 @@ protected void doExecute(Task task, Request request, ActionListener li // Visible for testing static void removeClusterAliasFromIndexExpression(Request request) { if (request.index() != null) { - String[] split = request.index().split(String.valueOf(RemoteClusterAware.REMOTE_CLUSTER_INDEX_SEPARATOR)); - if (split.length > 1) { + String[] split = RemoteClusterAware.splitIndexName(request.index()); + if (split[0] != null) { /* * if the cluster alias is null and the index field has a clusterAlias (clusterAlias:index notation) * that means this is executing on a remote cluster (it was forwarded by the querying cluster). @@ -565,9 +556,6 @@ static void removeClusterAliasFromIndexExpression(Request request) { * We need to strip off the clusterAlias from the index before executing the script locally, * so it will resolve to a local index */ - assert split.length == 2 - : "If the index contains the REMOTE_CLUSTER_INDEX_SEPARATOR it should have only two parts but it has " - + Arrays.toString(split); request.index(split[1]); } } diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java index d6908f58a901a..88b773d413fab 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java @@ -21,7 +21,6 @@ import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.lucene.uid.Versions; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.QueryBuilder; @@ -32,9 +31,7 @@ import org.elasticsearch.test.AbstractQueryTestCase; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; -import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xcontent.json.JsonXContent; import org.hamcrest.Matchers; import java.io.IOException; @@ -379,31 +376,4 @@ public void testDisallowExpensiveQueries() { ElasticsearchException e = expectThrows(ElasticsearchException.class, () -> queryBuilder.toQuery(searchExecutionContext)); assertEquals("[percolate] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", e.getMessage()); } - - public void testFromJsonWithDocumentType() throws IOException { - SearchExecutionContext 
searchExecutionContext = createSearchExecutionContext(); - String queryAsString = Strings.format(""" - {"percolate" : { "document": {}, "document_type":"%s", "field":"%s"}} - """, docType, queryField); - XContentParser parser = createParserWithCompatibilityFor(JsonXContent.jsonXContent, queryAsString, RestApiVersion.V_7); - QueryBuilder queryBuilder = parseQuery(parser); - queryBuilder.toQuery(searchExecutionContext); - assertCriticalWarnings(PercolateQueryBuilder.DOCUMENT_TYPE_DEPRECATION_MESSAGE); - } - - public void testFromJsonWithType() throws IOException { - indexedDocumentIndex = randomAlphaOfLength(4); - indexedDocumentId = randomAlphaOfLength(4); - indexedDocumentVersion = Versions.MATCH_ANY; - documentSource = Collections.singletonList(randomSource(new HashSet<>())); - SearchExecutionContext searchExecutionContext = createSearchExecutionContext(); - - String queryAsString = Strings.format(""" - {"percolate" : { "index": "%s", "type": "_doc", "id": "%s", "field":"%s"}} - """, indexedDocumentIndex, indexedDocumentId, queryField); - XContentParser parser = createParserWithCompatibilityFor(JsonXContent.jsonXContent, queryAsString, RestApiVersion.V_7); - QueryBuilder queryBuilder = parseQuery(parser); - rewriteAndFetch(queryBuilder, searchExecutionContext).toQuery(searchExecutionContext); - assertCriticalWarnings(PercolateQueryBuilder.TYPE_DEPRECATION_MESSAGE); - } } diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexValidator.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexValidator.java index 4d18f00ab572d..4b960e97ce0e0 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexValidator.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexValidator.java @@ -156,21 +156,10 @@ static void validateAgainstAliases( } private static SearchRequest skipRemoteIndexNames(SearchRequest source) { - return new SearchRequest(source).indices( - Arrays.stream(source.indices()).filter(name -> isRemoteExpression(name) == false).toArray(String[]::new) - ); - } - - private static boolean isRemoteExpression(String expression) { // An index expression that references a remote cluster uses ":" to separate the cluster-alias from the index portion of the // expression, e.g., cluster0:index-name - // in the same time date-math `expression` can also contain ':' symbol inside its name - // to distinguish between those two, given `expression` is pre-evaluated using date-math resolver - // after evaluation date-math `expression` should not contain ':' symbol - // otherwise if `expression` is legit remote name, ':' symbol remains - // NOTE: index expressions can be prefixed with "-", which will not be parsed by resolveDateMathExpression, - // but in this particular case it doesn't seem to be relevant. 
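// (Illustrative examples of the two colon cases: "remote1:logs" is a remote expression,
// whereas the date-math expression "<logs-{now/d{yyyy.MM.dd|+12:00}}>" contains a colon
// only in its time-zone offset. Resolving the date math first strips that colon, which
// is how this code told the two apart; the replacement below instead asks
// RemoteClusterAware.isRemoteIndexName.)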
- return IndexNameExpressionResolver.resolveDateMathExpression(expression) - .contains(String.valueOf(RemoteClusterAware.REMOTE_CLUSTER_INDEX_SEPARATOR)); + return new SearchRequest(source).indices( + Arrays.stream(source.indices()).filter(name -> RemoteClusterAware.isRemoteIndexName(name) == false).toArray(String[]::new) + ); } } diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/RestDeleteByQueryActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/RestDeleteByQueryActionTests.java deleted file mode 100644 index 505b12833fb5e..0000000000000 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/RestDeleteByQueryActionTests.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.reindex; - -import org.elasticsearch.core.RestApiVersion; -import org.elasticsearch.index.reindex.BulkByScrollResponse; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.action.search.RestSearchAction; -import org.elasticsearch.test.rest.FakeRestRequest; -import org.elasticsearch.test.rest.RestActionTestCase; -import org.elasticsearch.xcontent.XContentType; -import org.junit.Before; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Map; - -import static org.mockito.Mockito.mock; - -public final class RestDeleteByQueryActionTests extends RestActionTestCase { - - final List contentTypeHeader = Collections.singletonList(compatibleMediaType(XContentType.VND_JSON, RestApiVersion.V_7)); - - @Before - public void setUpAction() { - controller().registerHandler(new RestDeleteByQueryAction(nf -> false)); - verifyingClient.setExecuteVerifier((actionType, request) -> mock(BulkByScrollResponse.class)); - verifyingClient.setExecuteLocallyVerifier((actionType, request) -> mock(BulkByScrollResponse.class)); - } - - public void testTypeInPath() throws IOException { - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withHeaders( - Map.of("Content-Type", contentTypeHeader, "Accept", contentTypeHeader) - ).withMethod(RestRequest.Method.POST).withPath("/some_index/some_type/_delete_by_query").build(); - - // checks the type in the URL is propagated correctly to the request object - // only works after the request is dispatched, so its params are filled from url. - dispatchRequest(request); - - // RestDeleteByQueryAction itself doesn't check for a deprecated type usage - // checking here for a deprecation from its internal search request - assertCriticalWarnings(RestSearchAction.TYPES_DEPRECATION_MESSAGE); - } - -} diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/RestUpdateByQueryActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/RestUpdateByQueryActionTests.java deleted file mode 100644 index 0d35b30c86a5a..0000000000000 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/RestUpdateByQueryActionTests.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.reindex; - -import org.elasticsearch.core.RestApiVersion; -import org.elasticsearch.index.reindex.BulkByScrollResponse; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.action.search.RestSearchAction; -import org.elasticsearch.test.rest.FakeRestRequest; -import org.elasticsearch.test.rest.RestActionTestCase; -import org.elasticsearch.xcontent.XContentType; -import org.junit.Before; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Map; - -import static org.mockito.Mockito.mock; - -public final class RestUpdateByQueryActionTests extends RestActionTestCase { - - final List contentTypeHeader = Collections.singletonList(compatibleMediaType(XContentType.VND_JSON, RestApiVersion.V_7)); - - @Before - public void setUpAction() { - controller().registerHandler(new RestUpdateByQueryAction(nf -> false)); - verifyingClient.setExecuteVerifier((actionType, request) -> mock(BulkByScrollResponse.class)); - verifyingClient.setExecuteLocallyVerifier((actionType, request) -> mock(BulkByScrollResponse.class)); - } - - public void testTypeInPath() throws IOException { - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withHeaders( - Map.of("Content-Type", contentTypeHeader, "Accept", contentTypeHeader) - ).withMethod(RestRequest.Method.POST).withPath("/some_index/some_type/_update_by_query").build(); - - // checks the type in the URL is propagated correctly to the request object - // only works after the request is dispatched, so its params are filled from url. 
- dispatchRequest(request); - - // RestUpdateByQueryAction itself doesn't check for a deprecated type usage - // checking here for a deprecation from its internal search request - assertCriticalWarnings(RestSearchAction.TYPES_DEPRECATION_MESSAGE); - } -} diff --git a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureRepositoryMissingCredentialsIT.java b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureRepositoryMissingCredentialsIT.java index 7410b9acaf2b5..947f73c2ce580 100644 --- a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureRepositoryMissingCredentialsIT.java +++ b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureRepositoryMissingCredentialsIT.java @@ -9,7 +9,6 @@ package org.elasticsearch.repositories.azure; -import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.put.TransportPutRepositoryAction; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -41,17 +40,13 @@ protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { public void testMissingCredentialsException() { assertThat( - asInstanceOf( + safeAwaitAndUnwrapFailure( RepositoryVerificationException.class, - ExceptionsHelper.unwrapCause( - safeAwaitFailure( - AcknowledgedResponse.class, - l -> client().execute( - TransportPutRepositoryAction.TYPE, - new PutRepositoryRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, "test-repo").type("azure"), - l - ) - ) + AcknowledgedResponse.class, + l -> client().execute( + TransportPutRepositoryAction.TYPE, + new PutRepositoryRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, "test-repo").type("azure"), + l ) ).getCause().getMessage(), allOf( diff --git a/muted-tests.yml b/muted-tests.yml index 9589746c7db47..39684d3039e7d 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -251,18 +251,12 @@ tests: - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=transform/transforms_force_delete/Test force deleting a running transform} issue: https://github.com/elastic/elasticsearch/issues/113327 -- class: org.elasticsearch.xpack.security.support.SecurityIndexManagerIntegTests - method: testOnIndexAvailableForSearchIndexAlreadyAvailable - issue: https://github.com/elastic/elasticsearch/issues/113336 - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=analytics/top_metrics/sort by scaled float field} issue: https://github.com/elastic/elasticsearch/issues/113340 - class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT method: test {yaml=reference/ccr/apis/follow/post-resume-follow/line_84} issue: https://github.com/elastic/elasticsearch/issues/113343 -- class: org.elasticsearch.action.bulk.IncrementalBulkIT - method: testBulkLevelBulkFailureAfterFirstIncrementalRequest - issue: https://github.com/elastic/elasticsearch/issues/113365 - class: org.elasticsearch.xpack.ml.integration.MlJobIT method: testDeleteJob_TimingStatsDocumentIsDeleted issue: https://github.com/elastic/elasticsearch/issues/113370 @@ -293,18 +287,42 @@ tests: - class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT method: test {p0=search/180_locale_dependent_mapping/Test Index and Search locale dependent mappings / dates} issue: https://github.com/elastic/elasticsearch/issues/113537 -- class: 
org.elasticsearch.xpack.esql.qa.mixed.EsqlClientYamlIT - method: test {p0=esql/70_locale/Date format with default locale} - issue: https://github.com/elastic/elasticsearch/issues/113539 -- class: org.elasticsearch.xpack.esql.qa.mixed.EsqlClientYamlIT - method: test {p0=esql/70_locale/Date format with Italian locale} - issue: https://github.com/elastic/elasticsearch/issues/113540 - class: org.elasticsearch.xpack.inference.TextEmbeddingCrudIT method: testPutE5WithTrainedModelAndInference issue: https://github.com/elastic/elasticsearch/issues/113565 - class: org.elasticsearch.xpack.inference.TextEmbeddingCrudIT method: testPutE5Small_withPlatformAgnosticVariant issue: https://github.com/elastic/elasticsearch/issues/113577 +- class: org.elasticsearch.xpack.ml.integration.MlJobIT + method: testCantCreateJobWithSameID + issue: https://github.com/elastic/elasticsearch/issues/113581 +- class: org.elasticsearch.integration.KibanaUserRoleIntegTests + method: testFieldMappings + issue: https://github.com/elastic/elasticsearch/issues/113592 +- class: org.elasticsearch.integration.KibanaUserRoleIntegTests + method: testSearchAndMSearch + issue: https://github.com/elastic/elasticsearch/issues/113593 +- class: org.elasticsearch.xpack.transform.integration.TransformIT + method: testStopWaitForCheckpoint + issue: https://github.com/elastic/elasticsearch/issues/106113 +- class: org.elasticsearch.smoketest.MlWithSecurityIT + method: test {yaml=ml/3rd_party_deployment/Test start and stop multiple deployments} + issue: https://github.com/elastic/elasticsearch/issues/101458 +- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT + method: test {p0=search/540_ignore_above_synthetic_source/ignore_above mapping level setting on arrays} + issue: https://github.com/elastic/elasticsearch/issues/113648 +- class: org.elasticsearch.xpack.ml.integration.MlJobIT + method: testGetJobs_GivenMultipleJobs + issue: https://github.com/elastic/elasticsearch/issues/113654 +- class: org.elasticsearch.xpack.ml.integration.MlJobIT + method: testGetJobs_GivenSingleJob + issue: https://github.com/elastic/elasticsearch/issues/113655 +- class: org.elasticsearch.xpack.security.authz.interceptor.SearchRequestCacheDisablingInterceptorTests + method: testHasRemoteIndices + issue: https://github.com/elastic/elasticsearch/issues/113660 +- class: org.elasticsearch.xpack.security.authz.interceptor.SearchRequestCacheDisablingInterceptorTests + method: testRequestCacheWillBeDisabledWhenSearchRemoteIndices + issue: https://github.com/elastic/elasticsearch/issues/113659 # Examples: # diff --git a/plugins/analysis-nori/src/test/java/org/elasticsearch/plugin/analysis/nori/NoriAnalysisTests.java b/plugins/analysis-nori/src/test/java/org/elasticsearch/plugin/analysis/nori/NoriAnalysisTests.java index 1709d02263eea..f9a0b7d5c5216 100644 --- a/plugins/analysis-nori/src/test/java/org/elasticsearch/plugin/analysis/nori/NoriAnalysisTests.java +++ b/plugins/analysis-nori/src/test/java/org/elasticsearch/plugin/analysis/nori/NoriAnalysisTests.java @@ -24,7 +24,6 @@ import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.analysis.TokenFilterFactory; import org.elasticsearch.index.analysis.TokenizerFactory; -import org.elasticsearch.test.ESTestCase.TestAnalysis; import org.elasticsearch.test.ESTokenStreamTestCase; import java.io.IOException; @@ -33,6 +32,7 @@ import java.nio.file.Files; import java.nio.file.Path; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertTokenStreamContents; import 
static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.instanceOf; diff --git a/plugins/examples/build.gradle b/plugins/examples/build.gradle index 8be4dfd7d1ab7..b60485edd1cb8 100644 --- a/plugins/examples/build.gradle +++ b/plugins/examples/build.gradle @@ -16,8 +16,8 @@ subprojects { apply plugin: 'java' java { - sourceCompatibility = 17 - targetCompatibility = 17 + sourceCompatibility = 21 + targetCompatibility = 21 } repositories { diff --git a/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java b/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java index 32416b0d0d77d..168493eb52f60 100644 --- a/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java +++ b/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java @@ -13,7 +13,6 @@ import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.core.LogEvent; import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.util.Constants; import org.elasticsearch.Version; @@ -36,7 +35,6 @@ import java.util.HashSet; import java.util.List; import java.util.Set; -import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.function.Function; @@ -70,34 +68,6 @@ public class SpawnerNoBootstrapTests extends LuceneTestCase { MockLog.init(); } - static class ExpectedStreamMessage implements MockLog.LoggingExpectation { - final String expectedLogger; - final String expectedMessage; - final CountDownLatch matched; - volatile boolean saw; - - ExpectedStreamMessage(String logger, String message, CountDownLatch matched) { - this.expectedLogger = logger; - this.expectedMessage = message; - this.matched = matched; - } - - @Override - public void match(LogEvent event) { - if (event.getLoggerName().equals(expectedLogger) - && event.getLevel().equals(Level.WARN) - && event.getMessage().getFormattedMessage().equals(expectedMessage)) { - saw = true; - matched.countDown(); - } - } - - @Override - public void assertMatched() { - assertTrue("Expected to see message [" + expectedMessage + "] on logger [" + expectedLogger + "]", saw); - } - } - /** * Simplest case: a module with no controller daemon. 
*/ @@ -209,32 +179,32 @@ private void assertControllerSpawns(final Function pluginsDir String stderrLoggerName = "test_plugin-controller-stderr"; Loggers.setLevel(LogManager.getLogger(stdoutLoggerName), Level.TRACE); Loggers.setLevel(LogManager.getLogger(stderrLoggerName), Level.TRACE); - CountDownLatch messagesLoggedLatch = new CountDownLatch(2); - try (var mockLog = MockLog.capture(stdoutLoggerName, stderrLoggerName)) { - if (expectSpawn) { - mockLog.addExpectation(new ExpectedStreamMessage(stdoutLoggerName, "I am alive", messagesLoggedLatch)); - mockLog.addExpectation(new ExpectedStreamMessage(stderrLoggerName, "I am an error", messagesLoggedLatch)); + if (expectSpawn) { + final Process process; + try (var mockLog = MockLog.capture(stdoutLoggerName, stderrLoggerName)) { + mockLog.addExpectation(new MockLog.SeenEventExpectation("stdout", stdoutLoggerName, Level.WARN, "I am alive")); + mockLog.addExpectation(new MockLog.SeenEventExpectation("stderr", stderrLoggerName, Level.WARN, "I am an error")); + + try (var spawner = new Spawner()) { + spawner.spawnNativeControllers(environment); + List<Process> processes = spawner.getProcesses(); + + // as there should only be a reference in the list for the module that had the controller daemon, we expect one here + assertThat(processes, hasSize(1)); + process = processes.get(0); + // fail if we don't get the expected log messages soonish + mockLog.awaitAllExpectationsMatched(); + } } - Spawner spawner = new Spawner(); - spawner.spawnNativeControllers(environment); - - List<Process> processes = spawner.getProcesses(); - - if (expectSpawn) { - // as there should only be a reference in the list for the module that had the controller daemon, we expect one here - assertThat(processes, hasSize(1)); - Process process = processes.get(0); - // fail if we don't get the expected log messages within one second; usually it will be even quicker - assertTrue(messagesLoggedLatch.await(1, TimeUnit.SECONDS)); - spawner.close(); - // fail if the process does not die within one second; usually it will be even quicker but it depends on OS scheduling - assertTrue(process.waitFor(1, TimeUnit.SECONDS)); - } else { - assertThat(processes, is(empty())); + // fail if the process does not die within one second; usually it will be even quicker but it depends on OS scheduling + assertTrue(process.waitFor(1, TimeUnit.SECONDS)); + } else { + try (var spawner = new Spawner()) { + spawner.spawnNativeControllers(environment); + assertThat(spawner.getProcesses(), is(empty())); } - mockLog.assertAllExpectationsMatched(); } } diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java index a9402c324f7fc..f588b78c78cc8 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java @@ -386,6 +386,9 @@ public void test040JavaUsesTheOsProvidedKeystore() { if (distribution.packaging == Packaging.DOCKER_UBI || distribution.packaging == Packaging.DOCKER_IRON_BANK) { // In these images, the `cacerts` file ought to be a symlink here assertThat(path, equalTo("/etc/pki/ca-trust/extracted/java/cacerts")); + } else if (distribution.packaging == Packaging.DOCKER_WOLFI) { + // In these images, the `cacerts` file ought to be a symlink here + assertThat(path, equalTo("/etc/ssl/certs/java/cacerts")); } else { // Whereas on other images, it's a real file so the real path is the same assertThat(path,
equalTo("/usr/share/elasticsearch/jdk/lib/security/cacerts")); diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java index 5b86796aa80ca..a988a446f561f 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java @@ -436,7 +436,10 @@ private void verifyKeystorePermissions() { switch (distribution.packaging) { case TAR, ZIP -> assertThat(keystore, file(File, ARCHIVE_OWNER, ARCHIVE_OWNER, p660)); case DEB, RPM -> assertThat(keystore, file(File, "root", "elasticsearch", p660)); - case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD, DOCKER_CLOUD_ESS -> assertThat(keystore, DockerFileMatcher.file(p660)); + case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> assertThat( + keystore, + DockerFileMatcher.file(p660) + ); default -> throw new IllegalStateException("Unknown Elasticsearch packaging type."); } } diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java index a1a9af3b6e307..644990105f60f 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java @@ -245,7 +245,7 @@ protected static void install() throws Exception { installation = Packages.installPackage(sh, distribution); Packages.verifyPackageInstallation(installation, distribution, sh); } - case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD, DOCKER_CLOUD_ESS -> { + case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> { installation = Docker.runContainer(distribution); Docker.verifyContainerInstallation(installation); } @@ -337,6 +337,7 @@ public Shell.Result runElasticsearchStartCommand(String password, boolean daemon case DOCKER_IRON_BANK: case DOCKER_CLOUD: case DOCKER_CLOUD_ESS: + case DOCKER_WOLFI: // nothing, "installing" docker image is running it return Shell.NO_OP; default: @@ -359,6 +360,7 @@ public void stopElasticsearch() throws Exception { case DOCKER_IRON_BANK: case DOCKER_CLOUD: case DOCKER_CLOUD_ESS: + case DOCKER_WOLFI: // nothing, "installing" docker image is running it break; default: @@ -371,7 +373,7 @@ public void awaitElasticsearchStartup(Shell.Result result) throws Exception { switch (distribution.packaging) { case TAR, ZIP -> Archives.assertElasticsearchStarted(installation); case DEB, RPM -> Packages.assertElasticsearchStarted(sh, installation); - case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD, DOCKER_CLOUD_ESS -> Docker.waitForElasticsearchToStart(); + case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> Docker.waitForElasticsearchToStart(); default -> throw new IllegalStateException("Unknown Elasticsearch packaging type."); } } diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Distribution.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Distribution.java index b3ea54425af8e..05cef4a0818ba 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Distribution.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Distribution.java @@ -37,6 +37,8 @@ public Distribution(Path path) { this.packaging 
= Packaging.DOCKER_CLOUD; } else if (filename.endsWith(".cloud-ess.tar")) { this.packaging = Packaging.DOCKER_CLOUD_ESS; + } else if (filename.endsWith(".wolfi.tar")) { + this.packaging = Packaging.DOCKER_WOLFI; } else { int lastDot = filename.lastIndexOf('.'); this.packaging = Packaging.valueOf(filename.substring(lastDot + 1).toUpperCase(Locale.ROOT)); @@ -61,7 +63,7 @@ public boolean isPackage() { */ public boolean isDocker() { return switch (packaging) { - case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD, DOCKER_CLOUD_ESS -> true; + case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> true; default -> false; }; } @@ -76,7 +78,8 @@ public enum Packaging { DOCKER_UBI(".ubi.tar", Platforms.isDocker()), DOCKER_IRON_BANK(".ironbank.tar", Platforms.isDocker()), DOCKER_CLOUD(".cloud.tar", Platforms.isDocker()), - DOCKER_CLOUD_ESS(".cloud-ess.tar", Platforms.isDocker()); + DOCKER_CLOUD_ESS(".cloud-ess.tar", Platforms.isDocker()), + DOCKER_WOLFI(".wolfi.tar", Platforms.isDocker()); /** The extension of this distribution's file */ public final String extension; diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/Docker.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/Docker.java index cb8a955a5972c..c38eaa58f0552 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/Docker.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/Docker.java @@ -486,9 +486,9 @@ public static void verifyContainerInstallation(Installation es) { // Ensure the `elasticsearch` user and group exist. // These lines will both throw an exception if the command fails dockerShell.run("id elasticsearch"); - dockerShell.run("getent group elasticsearch"); + dockerShell.run("grep -E '^elasticsearch:' /etc/group"); - final Shell.Result passwdResult = dockerShell.run("getent passwd elasticsearch"); + final Shell.Result passwdResult = dockerShell.run("grep -E '^elasticsearch:' /etc/passwd"); final String homeDir = passwdResult.stdout().trim().split(":")[5]; assertThat("elasticsearch user's home directory is incorrect", homeDir, equalTo("/usr/share/elasticsearch")); diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java index 6c58bcba09879..e562e7591564e 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java @@ -163,10 +163,11 @@ String build() { public static String getImageName(Distribution distribution) { String suffix = switch (distribution.packaging) { case DOCKER -> ""; - case DOCKER_UBI -> "-ubi8"; + case DOCKER_UBI -> "-ubi"; case DOCKER_IRON_BANK -> "-ironbank"; case DOCKER_CLOUD -> "-cloud"; case DOCKER_CLOUD_ESS -> "-cloud-ess"; + case DOCKER_WOLFI -> "-wolfi"; default -> throw new IllegalStateException("Unexpected distribution packaging type: " + distribution.packaging); }; diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml index f3a977cd96f62..9c6a1ca2e96d2 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml +++ 
b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml @@ -375,7 +375,7 @@ setup: - do: allowed_warnings: - - "index template [test-composable-1] has index patterns [tsdb_templated_*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [test-composable-1] will take precedence during new index creation" + - "index template [test-composable-1] has index patterns [foo*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [test-composable-1] will take precedence during new index creation" indices.put_index_template: name: test-composable-1 body: @@ -527,7 +527,7 @@ setup: - do: allowed_warnings: - - "index template [test-composable-1] has index patterns [tsdb_templated_*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [test-composable-1] will take precedence during new index creation" + - "index template [test-composable-1] has index patterns [foo*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [test-composable-1] will take precedence during new index creation" indices.put_index_template: name: test-composable-1 body: diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index 45c1b65d19600..ed1cf905f7e9d 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -54,9 +54,8 @@ tasks.named("precommit").configure { dependsOn 'enforceYamlTestConvention' } -tasks.named("yamlRestCompatTestTransform").configure({ task -> - task.skipTest("tsdb/140_routing_path/multi-value routing path field", "Multi-value routing paths are allowed now. See #112645") - task.skipTest("indices.sort/10_basic/Index Sort", "warning does not exist for compatibility") - task.skipTest("search/330_fetch_fields/Test search rewrite", "warning does not exist for compatibility") - task.skipTest("search/540_ignore_above_synthetic_source/ignore_above mapping level setting on arrays", "Temporary mute while backporting to 8.x") +tasks.named("yamlRestCompatTestTransform").configure({task -> + task.skipTest("indices.sort/10_basic/Index Sort", "warning does not exist for compatibility") + task.skipTest("search/330_fetch_fields/Test search rewrite", "warning does not exist for compatibility") + task.skipTestsByFilePattern("indices.create/synthetic_source*.yml", "@UpdateForV9 -> tests do not pass after bumping API version to 9 [ES-9597]") }) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/92_metrics_auto_subobjects.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/92_metrics_auto_subobjects.yml index 414c24cfffd7d..603cc4fc2e304 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/92_metrics_auto_subobjects.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/92_metrics_auto_subobjects.yml @@ -2,7 +2,7 @@ "Metrics object indexing": - requires: test_runner_features: [ "allowed_warnings", "allowed_warnings_regex" ] - cluster_features: ["mapper.subobjects_auto"] + cluster_features: ["mapper.subobjects_auto_fixes"] reason: requires supporting subobjects auto setting - do: @@ -69,7 +69,7 @@ "Root with metrics": - requires: test_runner_features: [ "allowed_warnings", "allowed_warnings_regex" ] - cluster_features: ["mapper.subobjects_auto"] + cluster_features: ["mapper.subobjects_auto_fixes"] reason: requires supporting subobjects auto 
setting - do: @@ -131,7 +131,7 @@ "Metrics object indexing with synthetic source": - requires: test_runner_features: [ "allowed_warnings", "allowed_warnings_regex" ] - cluster_features: ["mapper.subobjects_auto"] + cluster_features: ["mapper.subobjects_auto_fixes"] reason: added in 8.4.0 - do: @@ -201,7 +201,7 @@ "Root without subobjects with synthetic source": - requires: test_runner_features: [ "allowed_warnings", "allowed_warnings_regex" ] - cluster_features: ["mapper.subobjects_auto"] + cluster_features: ["mapper.subobjects_auto_fixes"] reason: added in 8.4.0 - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/20_synthetic_source.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/20_synthetic_source.yml index b5a9146bc54a6..41d9fcc30a880 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/20_synthetic_source.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/20_synthetic_source.yml @@ -887,7 +887,7 @@ doubly nested object: --- subobjects auto: - requires: - cluster_features: ["mapper.subobjects_auto"] + cluster_features: ["mapper.subobjects_auto_fixes"] reason: requires tracking ignored source and supporting subobjects auto setting - do: @@ -924,9 +924,21 @@ subobjects auto: type: keyword nested: type: nested - auto_obj: - type: object - subobjects: auto + path: + properties: + to: + properties: + auto_obj: + type: object + subobjects: auto + properties: + inner: + properties: + id: + type: keyword + id: + type: + integer - do: bulk: @@ -934,13 +946,13 @@ subobjects auto: refresh: true body: - '{ "create": { } }' - - '{ "id": 1, "foo": 10, "foo.bar": 100, "regular": [ { "trace": { "id": "a" }, "span": { "id": "1" } }, { "trace": { "id": "b" }, "span": { "id": "1" } } ] }' + - '{ "id": 1, "foo": 10, "foo.bar": 100, "regular.trace.id": ["b", "a", "b"], "regular.span.id": "1" }' - '{ "create": { } }' - '{ "id": 2, "foo": 20, "foo.bar": 200, "stored": [ { "trace": { "id": "a" }, "span": { "id": "1" } }, { "trace": { "id": "b" }, "span": { "id": "1" } } ] }' - '{ "create": { } }' - '{ "id": 3, "foo": 30, "foo.bar": 300, "nested": [ { "a": 10, "b": 20 }, { "a": 100, "b": 200 } ] }' - '{ "create": { } }' - - '{ "id": 4, "auto_obj": { "foo": 40, "foo.bar": 400 } }' + - '{ "id": 4, "path.to.auto_obj": { "foo": 40, "foo.bar": 400, "inner.id": "baz" }, "path.to.id": 4000 }' - match: { errors: false } @@ -952,8 +964,8 @@ subobjects auto: - match: { hits.hits.0._source.id: 1 } - match: { hits.hits.0._source.foo: 10 } - match: { hits.hits.0._source.foo\.bar: 100 } - - match: { hits.hits.0._source.regular.span.id: "1" } - - match: { hits.hits.0._source.regular.trace.id: [ "a", "b" ] } + - match: { hits.hits.0._source.regular\.span\.id: "1" } + - match: { hits.hits.0._source.regular\.trace\.id: [ "a", "b" ] } - match: { hits.hits.1._source.id: 2 } - match: { hits.hits.1._source.foo: 20 } - match: { hits.hits.1._source.foo\.bar: 200 } @@ -969,8 +981,110 @@ subobjects auto: - match: { hits.hits.2._source.nested.1.a: 100 } - match: { hits.hits.2._source.nested.1.b: 200 } - match: { hits.hits.3._source.id: 4 } - - match: { hits.hits.3._source.auto_obj.foo: 40 } - - match: { hits.hits.3._source.auto_obj.foo\.bar: 400 } + - match: { hits.hits.3._source.path\.to\.auto_obj.foo: 40 } + - match: { hits.hits.3._source.path\.to\.auto_obj.foo\.bar: 400 } + - match: { hits.hits.3._source.path\.to\.auto_obj.inner\.id: baz } + - match: { hits.hits.3._source.path\.to\.id: 4000 } 
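For reference, the source round trip these assertions rely on, shown on an illustrative document: with `subobjects: auto` and synthetic source, inner objects that have no mapping of their own come back as dotted keys rather than re-nested objects, which is why the match clauses escape the dots (e.g. `attributes.foo\.bar`):

    indexed document:  { "attributes": { "foo": { "bar": 10 } } }
    synthetic _source: { "attributes": { "foo.bar": 10 } }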
+ + +--- +subobjects auto with path flattening: + - requires: + cluster_features: ["mapper.subobjects_auto_fixes"] + reason: requires tracking ignored source and supporting subobjects auto setting + + - do: + indices.create: + index: test + body: + mappings: + _source: + mode: synthetic + subobjects: auto + properties: + id: + type: integer + attributes: + type: object + subobjects: auto + + - do: + bulk: + index: test + refresh: true + body: + - '{ "create": { } }' + - '{ "id": 1, "attributes": { "foo": { "bar": 10 } } }' + - '{ "create": { } }' + - '{ "id": 2, "attributes": { "foo": { "bar": 20 } } }' + - '{ "create": { } }' + - '{ "id": 3, "attributes": { "foo": { "bar": 30 } } }' + - '{ "create": { } }' + - '{ "id": 4, "attributes": { "foo": { "bar": 40 } } }' + + - match: { errors: false } + + - do: + search: + index: test + sort: id + + - match: { hits.hits.0._source.id: 1 } + - match: { hits.hits.0._source.attributes.foo\.bar: 10 } + - match: { hits.hits.1._source.id: 2 } + - match: { hits.hits.1._source.attributes.foo\.bar: 20 } + - match: { hits.hits.2._source.id: 3 } + - match: { hits.hits.2._source.attributes.foo\.bar: 30 } + - match: { hits.hits.3._source.id: 4 } + - match: { hits.hits.3._source.attributes.foo\.bar: 40 } + + +--- +subobjects auto with dynamic template: + - requires: + cluster_features: ["mapper.subobjects_auto_fixes"] + reason: requires tracking ignored source and supporting subobjects auto setting + + - do: + indices.create: + index: test + body: + mappings: + _source: + mode: synthetic + subobjects: auto + dynamic_templates: + - attributes_tmpl: + match: attributes + mapping: + type: object + enabled: false + subobjects: auto + properties: + id: + type: integer + + - do: + bulk: + index: test + refresh: true + body: + - '{ "create": { } }' + - '{ "id": 1, "attributes": { "foo": 10, "path.to.bar": "val1" }, "a": 100, "a.b": 1000 }' + + - match: { errors: false } + + - do: + search: + index: test + sort: id + + - match: { hits.hits.0._source.id: 1 } + - match: { hits.hits.0._source.attributes.foo: 10 } + - match: { hits.hits.0._source.attributes.path\.to\.bar: val1 } + - match: { hits.hits.0._source.a: 100 } + - match: { hits.hits.0._source.a\.b: 1000 } + --- synthetic_source with copy_to: @@ -1755,7 +1869,7 @@ synthetic_source with copy_to pointing to ambiguous field and subobjects false: --- synthetic_source with copy_to pointing to ambiguous field and subobjects auto: - requires: - cluster_features: ["mapper.source.synthetic_source_copy_to_inside_objects_fix"] + cluster_features: ["mapper.subobjects_auto_fixes"] reason: requires copy_to support in synthetic source - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_index_template/15_composition.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_index_template/15_composition.yml index 3d82539944a97..912f4e9f93df9 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_index_template/15_composition.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_index_template/15_composition.yml @@ -453,7 +453,7 @@ --- "Composable index templates that include subobjects: auto at root": - requires: - cluster_features: ["mapper.subobjects_auto"] + cluster_features: ["mapper.subobjects_auto_fixes"] reason: "https://github.com/elastic/elasticsearch/issues/96768 fixed at 8.11.0" test_runner_features: "allowed_warnings" @@ -504,7 +504,7 @@ --- "Composable index templates that include subobjects: auto on 
arbitrary field": - requires: - cluster_features: ["mapper.subobjects_auto"] + cluster_features: ["mapper.subobjects_auto_fixes"] reason: "https://github.com/elastic/elasticsearch/issues/96768 fixed at 8.11.0" test_runner_features: "allowed_warnings" diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/330_fetch_fields.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/330_fetch_fields.yml index 8a8dffda69e20..2b77b5558b3d3 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/330_fetch_fields.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/330_fetch_fields.yml @@ -1129,7 +1129,7 @@ fetch geo_point: --- "Test with subobjects: auto": - requires: - cluster_features: "mapper.subobjects_auto" + cluster_features: "mapper.subobjects_auto_fixes" reason: requires support for subobjects auto setting - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/540_ignore_above_synthetic_source.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/540_ignore_above_synthetic_source.yml index 11259d3e1bfd1..435cda637cca6 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/540_ignore_above_synthetic_source.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/540_ignore_above_synthetic_source.yml @@ -80,8 +80,7 @@ ignore_above mapping level setting on arrays: match_all: {} - length: { hits.hits: 1 } - #TODO: synthetic source field reconstruction bug (TBD: add link to the issue here) - #- match: { hits.hits.0._source.keyword: ["foo bar", "the quick brown fox"] } + - match: { hits.hits.0._source.keyword: ["foo bar", "the quick brown fox"] } - match: { hits.hits.0._source.flattened.value: [ "jumps over", "the quick brown fox" ] } - match: { hits.hits.0.fields.keyword.0: "foo bar" } - match: { hits.hits.0.fields.flattened.0.value: "jumps over" } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/IncrementalBulkIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/IncrementalBulkIT.java index d7a5d4e2ac973..75f914f76dd77 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/IncrementalBulkIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/IncrementalBulkIT.java @@ -15,6 +15,7 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.core.AbstractRefCounted; import org.elasticsearch.core.Releasable; @@ -37,6 +38,7 @@ import java.util.Map; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.CyclicBarrier; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.atomic.AtomicBoolean; @@ -214,14 +216,8 @@ public void testGlobalBulkFailure() throws InterruptedException { IncrementalBulkService incrementalBulkService = internalCluster().getInstance(IncrementalBulkService.class, randomNodeName); ThreadPool threadPool = internalCluster().getInstance(ThreadPool.class, randomNodeName); - int threadCount = threadPool.info(ThreadPool.Names.WRITE).getMax(); - long queueSize = 
threadPool.info(ThreadPool.Names.WRITE).getQueueSize().singles(); - blockWritePool(threadCount, threadPool, blockingLatch); - - Runnable runnable = () -> {}; - for (int i = 0; i < queueSize; i++) { - threadPool.executor(ThreadPool.Names.WRITE).execute(runnable); - } + blockWritePool(threadPool, blockingLatch); + fillWriteQueue(threadPool); IncrementalBulkService.Handler handler = incrementalBulkService.newBulkRequest(); if (randomBoolean()) { @@ -253,35 +249,32 @@ public void testBulkLevelBulkFailureAfterFirstIncrementalRequest() throws Except AbstractRefCounted refCounted = AbstractRefCounted.of(() -> {}); PlainActionFuture<BulkResponse> future = new PlainActionFuture<>(); - int threadCount = threadPool.info(ThreadPool.Names.WRITE).getMax(); - long queueSize = threadPool.info(ThreadPool.Names.WRITE).getQueueSize().singles(); - CountDownLatch blockingLatch1 = new CountDownLatch(1); AtomicBoolean nextRequested = new AtomicBoolean(true); AtomicLong hits = new AtomicLong(0); - try (Releasable ignored2 = blockingLatch1::countDown;) { - blockWritePool(threadCount, threadPool, blockingLatch1); + try { + blockWritePool(threadPool, blockingLatch1); while (nextRequested.get()) { nextRequested.set(false); refCounted.incRef(); handler.addItems(List.of(indexRequest(index)), refCounted::decRef, () -> nextRequested.set(true)); hits.incrementAndGet(); } + } finally { + blockingLatch1.countDown(); } assertBusy(() -> assertTrue(nextRequested.get())); CountDownLatch blockingLatch2 = new CountDownLatch(1); - try (Releasable ignored3 = blockingLatch2::countDown;) { - blockWritePool(threadCount, threadPool, blockingLatch2); - Runnable runnable = () -> {}; - // Fill Queue - for (int i = 0; i < queueSize; i++) { - threadPool.executor(ThreadPool.Names.WRITE).execute(runnable); - } + try { + blockWritePool(threadPool, blockingLatch2); + fillWriteQueue(threadPool); handler.lastItems(List.of(indexRequest(index)), refCounted::decRef, future); + } finally { + blockingLatch2.countDown(); } // Should not throw because some succeeded @@ -459,19 +452,55 @@ public void testShortCircuitShardLevelFailureWithIngestNodeHop() throws Exceptio } } - private static void blockWritePool(int threadCount, ThreadPool threadPool, CountDownLatch blockingLatch) throws InterruptedException { - CountDownLatch startedLatch = new CountDownLatch(threadCount); + private static void blockWritePool(ThreadPool threadPool, CountDownLatch finishLatch) { + final var threadCount = threadPool.info(ThreadPool.Names.WRITE).getMax(); + final var startBarrier = new CyclicBarrier(threadCount + 1); + final var blockingTask = new AbstractRunnable() { + @Override + public void onFailure(Exception e) { + fail(e); + } + + @Override + protected void doRun() { + safeAwait(startBarrier); + safeAwait(finishLatch); + } + + @Override + public boolean isForceExecution() { + return true; + } + }; for (int i = 0; i < threadCount; i++) { - threadPool.executor(ThreadPool.Names.WRITE).execute(() -> { - startedLatch.countDown(); - try { - blockingLatch.await(); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - }); + threadPool.executor(ThreadPool.Names.WRITE).execute(blockingTask); + } + safeAwait(startBarrier); + } + + private static void fillWriteQueue(ThreadPool threadPool) { + final var queueSize = Math.toIntExact(threadPool.info(ThreadPool.Names.WRITE).getQueueSize().singles()); + final var queueFilled = new AtomicBoolean(false); + final var queueFillingTask = new AbstractRunnable() { + @Override + public void onFailure(Exception e) { + fail(e); + } + +
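// Each filler task below should sit in the WRITE queue for as long as the pool stays
// blocked. queueFilled is only set to true after the enqueueing loop completes, so if
// a filler task gets to run before then, a WRITE thread must have been free -- i.e. the
// pool was not actually blocked -- and doRun fails the test.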
@Override + protected void doRun() { + assertTrue("thread pool not blocked", queueFilled.get()); + } + + @Override + public boolean isForceExecution() { + return true; + } + }; + for (int i = 0; i < queueSize; i++) { + threadPool.executor(ThreadPool.Names.WRITE).execute(queueFillingTask); } - startedLatch.await(); + queueFilled.set(true); } private BulkResponse executeBulk(long docs, String index, IncrementalBulkService.Handler handler, ExecutorService executorService) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/shards/ClusterSearchShardsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/shards/ClusterSearchShardsIT.java index c81b9a82e8e32..99d7b28536f7a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/shards/ClusterSearchShardsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/shards/ClusterSearchShardsIT.java @@ -8,7 +8,6 @@ */ package org.elasticsearch.cluster.shards; -import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsGroup; import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsRequest; import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsResponse; @@ -145,17 +144,13 @@ public void testClusterSearchShardsWithBlocks() { enableIndexBlock("test-blocks", SETTING_BLOCKS_METADATA); assertBlocked( null, - asInstanceOf( + safeAwaitAndUnwrapFailure( ClusterBlockException.class, - ExceptionsHelper.unwrapCause( - safeAwaitFailure( - ClusterSearchShardsResponse.class, - l -> client().execute( - TransportClusterSearchShardsAction.TYPE, - new ClusterSearchShardsRequest(TEST_REQUEST_TIMEOUT, "test-blocks"), - l - ) - ) + ClusterSearchShardsResponse.class, + l -> client().execute( + TransportClusterSearchShardsAction.TYPE, + new ClusterSearchShardsRequest(TEST_REQUEST_TIMEOUT, "test-blocks"), + l ) ) ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/common/network/ThreadWatchdogIT.java b/server/src/internalClusterTest/java/org/elasticsearch/common/network/ThreadWatchdogIT.java index 6d3ed45f4015a..f2441e43de8d8 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/common/network/ThreadWatchdogIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/common/network/ThreadWatchdogIT.java @@ -9,7 +9,7 @@ package org.elasticsearch.common.network; -import org.apache.logging.log4j.core.LogEvent; +import org.apache.logging.log4j.Level; import org.elasticsearch.action.ActionListenerResponseHandler; import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.client.Request; @@ -23,7 +23,6 @@ import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.common.util.concurrent.RunOnce; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; @@ -43,7 +42,6 @@ import java.io.IOException; import java.util.Collection; import java.util.List; -import java.util.concurrent.CountDownLatch; import java.util.function.Predicate; import java.util.function.Supplier; @@ -103,26 +101,24 @@ public void handleRequest(RestRequest request, RestChannel channel, NodeClient c } private static void blockAndWaitForWatchdogLogs() { - final var threadName = Thread.currentThread().getName(); - final var logsSeenLatch = new CountDownLatch(2); - final var 
warningSeen = new RunOnce(logsSeenLatch::countDown); - final var threadDumpSeen = new RunOnce(logsSeenLatch::countDown); - MockLog.assertThatLogger(() -> safeAwait(logsSeenLatch), ThreadWatchdog.class, new MockLog.LoggingExpectation() { - @Override - public void match(LogEvent event) { - final var formattedMessage = event.getMessage().getFormattedMessage(); - if (formattedMessage.contains("the following threads are active but did not make progress in the preceding [100ms]:") - && formattedMessage.contains(threadName)) { - warningSeen.run(); - } - if (formattedMessage.contains("hot threads dump due to active threads not making progress")) { - threadDumpSeen.run(); - } - } - - @Override - public void assertMatched() {} - }); + MockLog.awaitLogger( + () -> {}, + ThreadWatchdog.class, + new MockLog.SeenEventExpectation( + "warning", + ThreadWatchdog.class.getCanonicalName(), + Level.WARN, + "*the following threads are active but did not make progress in the preceding [100ms]:*" + + Thread.currentThread().getName() + + "*" + ), + new MockLog.SeenEventExpectation( + "thread dump", + ThreadWatchdog.class.getCanonicalName(), + Level.WARN, + "*hot threads dump due to active threads not making progress*" + ) + ); } public void testThreadWatchdogHttpLogging() throws IOException { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java index 8fd10cdf07310..10f13f6ab152f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java @@ -160,7 +160,7 @@ public boolean innerMatch(final LogEvent event) { other.beforeTest(random()); final ClusterState first = internalCluster().getInstance(ClusterService.class).state(); assertThat(first.nodes().getSize(), equalTo(1)); - assertBusy(mockLog::assertAllExpectationsMatched); + mockLog.awaitAllExpectationsMatched(); } finally { other.close(); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/features/ClusterFeaturesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/features/ClusterFeaturesIT.java index 48009676ee6b7..74fd945ed3779 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/features/ClusterFeaturesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/features/ClusterFeaturesIT.java @@ -31,6 +31,7 @@ public void testClusterHasFeatures() { FeatureService service = internalCluster().getCurrentMasterNodeInstance(FeatureService.class); assertThat(service.getNodeFeatures(), hasKey(FeatureService.FEATURES_SUPPORTED.id())); + assertThat(service.getNodeFeatures(), hasKey(FeatureService.TEST_FEATURES_ENABLED.id())); // check the nodes all have a feature in their cluster state (there should always be features_supported) var response = clusterAdmin().state(new ClusterStateRequest(TEST_REQUEST_TIMEOUT).clear().nodes(true)).actionGet(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/cluster/ShardLockFailureIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/cluster/ShardLockFailureIT.java index 9bcd8528acf9e..0ce3ca53e1c1f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/cluster/ShardLockFailureIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/cluster/ShardLockFailureIT.java @@ -72,9 +72,9 @@ public void 
testShardLockFailure() throws Exception { var ignored1 = internalCluster().getInstance(NodeEnvironment.class, node).shardLock(shardId, "blocked for test"); var mockLog = MockLog.capture(IndicesClusterStateService.class); ) { - final CountDownLatch countDownLatch = new CountDownLatch(1); mockLog.addExpectation(new MockLog.LoggingExpectation() { + private final CountDownLatch countDownLatch = new CountDownLatch(1); int debugMessagesSeen = 0; int warnMessagesSeen = 0; @@ -101,14 +101,20 @@ public synchronized void match(LogEvent event) { } @Override - public void assertMatched() {} + public void assertMatched() { + fail("unused"); + } + + @Override + public void awaitMatched(long millis) throws InterruptedException { + assertTrue(countDownLatch.await(millis, TimeUnit.MILLISECONDS)); + } }); updateIndexSettings(Settings.builder().putNull(IndexMetadata.INDEX_ROUTING_EXCLUDE_GROUP_PREFIX + "._name"), indexName); ensureYellow(indexName); - assertTrue(countDownLatch.await(30, TimeUnit.SECONDS)); + mockLog.awaitAllExpectationsMatched(); assertEquals(ClusterHealthStatus.YELLOW, clusterAdmin().prepareHealth(TEST_REQUEST_TIMEOUT, indexName).get().getStatus()); - mockLog.assertAllExpectationsMatched(); } ensureGreen(indexName); @@ -153,7 +159,7 @@ public void testShardLockTimeout() throws Exception { ); updateIndexSettings(Settings.builder().putNull(IndexMetadata.INDEX_ROUTING_EXCLUDE_GROUP_PREFIX + "._name"), indexName); - assertBusy(mockLog::assertAllExpectationsMatched); + mockLog.awaitAllExpectationsMatched(); final var clusterHealthResponse = clusterAdmin().prepareHealth(TEST_REQUEST_TIMEOUT, indexName) .setWaitForEvents(Priority.LANGUID) .setTimeout(TimeValue.timeValueSeconds(10)) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestProcessorNotInstalledOnAllNodesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestProcessorNotInstalledOnAllNodesIT.java index 5e728bede4dbe..6ebb5f5287e1b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestProcessorNotInstalledOnAllNodesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestProcessorNotInstalledOnAllNodesIT.java @@ -10,7 +10,6 @@ package org.elasticsearch.ingest; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ingest.PutPipelineTransportAction; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.common.bytes.BytesReference; @@ -61,17 +60,13 @@ public void testFailPipelineCreation() { ensureStableCluster(2, node2); assertThat( - asInstanceOf( + safeAwaitAndUnwrapFailure( ElasticsearchParseException.class, - ExceptionsHelper.unwrapCause( - safeAwaitFailure( - AcknowledgedResponse.class, - l -> client().execute( - PutPipelineTransportAction.TYPE, - IngestPipelineTestUtils.putJsonPipelineRequest("id", pipelineSource), - l - ) - ) + AcknowledgedResponse.class, + l -> client().execute( + PutPipelineTransportAction.TYPE, + IngestPipelineTestUtils.putJsonPipelineRequest("id", pipelineSource), + l ) ).getMessage(), containsString("Processor type [test] is not installed on node") @@ -84,17 +79,13 @@ public void testFailPipelineCreationProcessorNotInstalledOnMasterNode() throws E internalCluster().startNode(); assertThat( - asInstanceOf( + safeAwaitAndUnwrapFailure( ElasticsearchParseException.class, - ExceptionsHelper.unwrapCause( - safeAwaitFailure( - AcknowledgedResponse.class, - l -> client().execute( - 
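The conversions to `MockLog.awaitLogger`, `MockLog.SeenEventExpectation`, and `mockLog.awaitAllExpectationsMatched()` above replace hand-rolled `LogEvent` matching and `assertBusy` polling with declarative expectations whose message patterns treat `*` as a wildcard. A self-contained illustration of the underlying await-until-a-matching-log-line idea, written against `java.util.logging`; the `globToRegex` helper and the logger name are inventions of this sketch, not Elasticsearch test-framework API:

```java
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.logging.Handler;
import java.util.logging.LogRecord;
import java.util.logging.Logger;

public class AwaitLogSketch {
    // Convert a '*' glob (the style used by the expectations above) into a regex.
    static String globToRegex(String glob) {
        return "\\Q" + glob.replace("*", "\\E.*\\Q") + "\\E";
    }

    public static void main(String[] args) throws InterruptedException {
        Logger logger = Logger.getLogger("watchdog");
        CountDownLatch seen = new CountDownLatch(1);
        String pattern = globToRegex("*did not make progress*");

        logger.addHandler(new Handler() {
            @Override public void publish(LogRecord record) {
                if (record.getMessage().matches(pattern)) {
                    seen.countDown(); // expectation matched
                }
            }
            @Override public void flush() {}
            @Override public void close() {}
        });

        logger.warning("the following threads are active but did not make progress in the preceding [100ms]: [t1]");
        System.out.println("matched=" + seen.await(1, TimeUnit.SECONDS)); // matched=true
    }
}
```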
PutPipelineTransportAction.TYPE, - IngestPipelineTestUtils.putJsonPipelineRequest("id", pipelineSource), - l - ) - ) + AcknowledgedResponse.class, + l -> client().execute( + PutPipelineTransportAction.TYPE, + IngestPipelineTestUtils.putJsonPipelineRequest("id", pipelineSource), + l ) ).getMessage(), equalTo("No processor type exists with name [test]") diff --git a/server/src/internalClusterTest/java/org/elasticsearch/script/StoredScriptsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/script/StoredScriptsIT.java index d68aae26560bf..e9efab5934e52 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/script/StoredScriptsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/script/StoredScriptsIT.java @@ -8,7 +8,6 @@ */ package org.elasticsearch.script; -import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptAction; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; @@ -71,20 +70,16 @@ public void testBasics() { assertEquals( "Validation Failed: 1: id cannot contain '#' for stored script;", - asInstanceOf( + safeAwaitAndUnwrapFailure( IllegalArgumentException.class, - ExceptionsHelper.unwrapCause( - safeAwaitFailure( - AcknowledgedResponse.class, - l -> client().execute( - TransportPutStoredScriptAction.TYPE, - new PutStoredScriptRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT).id("id#") - .content(new BytesArray(Strings.format(""" - {"script": {"lang": "%s", "source": "1"} } - """, LANG)), XContentType.JSON), - l - ) - ) + AcknowledgedResponse.class, + l -> client().execute( + TransportPutStoredScriptAction.TYPE, + new PutStoredScriptRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT).id("id#") + .content(new BytesArray(Strings.format(""" + {"script": {"lang": "%s", "source": "1"} } + """, LANG)), XContentType.JSON), + l ) ).getMessage() ); @@ -93,21 +88,16 @@ public void testBasics() { public void testMaxScriptSize() { assertEquals( "exceeded max allowed stored script size in bytes [64] with size [65] for script [foobar]", - asInstanceOf( + safeAwaitAndUnwrapFailure( IllegalArgumentException.class, - ExceptionsHelper.unwrapCause( - safeAwaitFailure( - AcknowledgedResponse.class, - l -> client().execute( - TransportPutStoredScriptAction.TYPE, - new PutStoredScriptRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT).id("foobar") - .content(new BytesArray(Strings.format(""" - {"script": { "lang": "%s", "source":"0123456789abcdef"} }\ - """, LANG)), XContentType.JSON), - l - ) - - ) + AcknowledgedResponse.class, + l -> client().execute( + TransportPutStoredScriptAction.TYPE, + new PutStoredScriptRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT).id("foobar") + .content(new BytesArray(Strings.format(""" + {"script": { "lang": "%s", "source":"0123456789abcdef"} }\ + """, LANG)), XContentType.JSON), + l ) ).getMessage() ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/CollapseSearchResultsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/CollapseSearchResultsIT.java index 48dda7fd30068..89474a0181597 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/CollapseSearchResultsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/CollapseSearchResultsIT.java @@ -14,6 +14,7 @@ import org.elasticsearch.index.query.MatchAllQueryBuilder; import 
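These call sites all collapse the old `asInstanceOf(ExceptionsHelper.unwrapCause(safeAwaitFailure(...)))` chain into a single `safeAwaitAndUnwrapFailure(...)` call: wait for the listener to fail, unwrap the wrapping exception, and cast the cause to the expected type. A rough sketch of that shape, modeled with `CompletableFuture` in place of `ActionListener` (the body below is a plausible reading of the helper, not the actual test-framework code):

```java
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;

public class UnwrapFailureSketch {
    // Await a failure, unwrap the wrapper, and cast to the expected type in one step.
    static <E extends Throwable> E safeAwaitAndUnwrapFailure(Class<E> expected, CompletableFuture<?> future) {
        try {
            future.join();
            throw new AssertionError("expected failure but call succeeded");
        } catch (CompletionException e) {
            Throwable cause = e.getCause(); // unwrap, like ExceptionsHelper.unwrapCause
            if (expected.isInstance(cause)) {
                return expected.cast(cause);
            }
            throw new AssertionError("unexpected failure type: " + cause, cause);
        }
    }

    public static void main(String[] args) {
        CompletableFuture<Void> failing =
            CompletableFuture.failedFuture(new IllegalArgumentException("id cannot contain '#'"));
        IllegalArgumentException e = safeAwaitAndUnwrapFailure(IllegalArgumentException.class, failing);
        System.out.println(e.getMessage()); // id cannot contain '#'
    }
}
```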
org.elasticsearch.search.collapse.CollapseBuilder; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.xcontent.XContentType; import java.util.Map; import java.util.Set; @@ -85,4 +86,33 @@ public void testCollapseWithFields() { } ); } + + public void testCollapseWithStoredFields() { + final String indexName = "test_collapse"; + createIndex(indexName); + final String collapseField = "collapse_field"; + assertAcked(indicesAdmin().preparePutMapping(indexName).setSource(""" + { + "dynamic": "strict", + "properties": { + "collapse_field": { "type": "keyword", "store": true }, + "ts": { "type": "date", "store": true } + } + } + """, XContentType.JSON)); + index(indexName, "id_1_0", Map.of(collapseField, "value1", "ts", 0)); + index(indexName, "id_1_1", Map.of(collapseField, "value1", "ts", 1)); + index(indexName, "id_2_0", Map.of(collapseField, "value2", "ts", 2)); + refresh(indexName); + + assertNoFailuresAndResponse( + prepareSearch(indexName).setQuery(new MatchAllQueryBuilder()) + .setFetchSource(false) + .storedFields("*") + .setCollapse(new CollapseBuilder(collapseField)), + searchResponse -> { + assertEquals(collapseField, searchResponse.getHits().getCollapseField()); + } + ); + } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java index ac68ff243166b..a754350c8faf7 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java @@ -693,7 +693,7 @@ public void testCancel() throws Exception { } }, 30, TimeUnit.SECONDS); cancellable.cancel(); - assertBusy(mockLog::assertAllExpectationsMatched); + mockLog.awaitAllExpectationsMatched(); logger.info("--> waiting for field-caps tasks to be cancelled"); assertBusy(() -> { List tasks = clusterAdmin().prepareListTasks() diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/CorruptedBlobStoreRepositoryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/CorruptedBlobStoreRepositoryIT.java index 2206f34e4d2f3..eea60ce13af2f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/CorruptedBlobStoreRepositoryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/CorruptedBlobStoreRepositoryIT.java @@ -382,9 +382,10 @@ public void testMountCorruptedRepositoryData() throws Exception { Files.write(repo.resolve(getRepositoryDataBlobName(repositoryData.getGenId())), randomByteArrayOfLength(randomIntBetween(1, 100))); logger.info("--> verify loading repository data throws RepositoryException"); - asInstanceOf( + safeAwaitFailure( RepositoryException.class, - safeAwaitFailure(RepositoryData.class, l -> repository.getRepositoryData(EsExecutors.DIRECT_EXECUTOR_SERVICE, l)) + RepositoryData.class, + l -> repository.getRepositoryData(EsExecutors.DIRECT_EXECUTOR_SERVICE, l) ); final String otherRepoName = "other-repo"; @@ -397,9 +398,10 @@ public void testMountCorruptedRepositoryData() throws Exception { final Repository otherRepo = getRepositoryOnMaster(otherRepoName); logger.info("--> verify loading repository data from newly mounted repository throws RepositoryException"); - asInstanceOf( + safeAwaitFailure( RepositoryException.class, - safeAwaitFailure(RepositoryData.class, l -> repository.getRepositoryData(EsExecutors.DIRECT_EXECUTOR_SERVICE, l)) + 
RepositoryData.class, + l -> repository.getRepositoryData(EsExecutors.DIRECT_EXECUTOR_SERVICE, l) ); } diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 2d0f526f64a69..b519e263dd387 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -225,6 +225,7 @@ static TransportVersion def(int id) { public static final TransportVersion ILM_ADD_SEARCHABLE_SNAPSHOT_TOTAL_SHARDS_PER_NODE = def(8_749_00_0); public static final TransportVersion SEMANTIC_TEXT_SEARCH_INFERENCE_ID = def(8_750_00_0); public static final TransportVersion ML_INFERENCE_CHUNKING_SETTINGS = def(8_751_00_0); + public static final TransportVersion SEMANTIC_QUERY_INNER_HITS = def(8_752_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 23436786126d8..fea27255e1328 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -185,6 +185,7 @@ public class Version implements VersionId, ToXContentFragment { public static final Version V_8_15_0 = new Version(8_15_00_99); public static final Version V_8_15_1 = new Version(8_15_01_99); public static final Version V_8_15_2 = new Version(8_15_02_99); + public static final Version V_8_15_3 = new Version(8_15_03_99); public static final Version V_8_16_0 = new Version(8_16_00_99); public static final Version V_9_0_0 = new Version(9_00_00_99); public static final Version CURRENT = V_9_0_0; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/TransportNodesCapabilitiesAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/TransportNodesCapabilitiesAction.java index 8df34d882941a..fb7539ea218d1 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/TransportNodesCapabilitiesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/TransportNodesCapabilitiesAction.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.features.FeatureService; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.rest.RestController; @@ -151,6 +152,10 @@ public NodeCapabilitiesRequest( this.restApiVersion = restApiVersion; } + @UpdateForV9 // 8.x blows up in a mixed cluster when trying to read RestApiVersion.forMajor(9) + // ./gradlew ":qa:mixed-cluster:v8.16.0#mixedClusterTest" + // -Dtests.class="org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT" + // -Dtests.method="test {p0=capabilities/10_basic/Capabilities API}" @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); @@ -159,7 +164,9 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(path); out.writeCollection(parameters, StreamOutput::writeString); out.writeCollection(capabilities, StreamOutput::writeString); - out.writeVInt(restApiVersion.major); + // Fixme: lies! all lies! 
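For readers decoding release ids like the `8_15_03_99` added above: a `Version` packs major, minor, revision, and build into one integer, two decimal digits each, with 99 conventionally marking a release build (`TransportVersion` ids such as `8_752_00_0` follow a separate scheme). A quick decoding sketch:

```java
public class VersionIdSketch {
    record Ver(int major, int minor, int revision, int build) {}

    // 8_15_03_99 -> major 8, minor 15, revision 3, build 99 (release marker).
    static Ver decode(int id) {
        return new Ver(id / 1_000_000, (id / 10_000) % 100, (id / 100) % 100, id % 100);
    }

    public static void main(String[] args) {
        System.out.println(decode(8_15_03_99)); // Ver[major=8, minor=15, revision=3, build=99]
    }
}
```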
+ out.writeVInt(8); + // out.writeVInt(restApiVersion.major); } } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterActionRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterActionRequest.java index dbcece1eb4364..9c5b6097b11bd 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterActionRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterActionRequest.java @@ -15,13 +15,12 @@ import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.ValidateActions; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; -import org.elasticsearch.transport.RemoteClusterService; +import org.elasticsearch.transport.RemoteClusterAware; import java.io.IOException; import java.util.Arrays; @@ -166,13 +165,7 @@ public String getDescription() { boolean localIndicesPresent(String[] indices) { for (String index : indices) { - // ensure that `index` is a remote name and not a date math expression which includes ':' symbol - // since date math expression after evaluation should not contain ':' symbol - // NOTE: index expressions can be prefixed with "-" for index exclusion, which will not be parsed by resolveDateMathExpression - String indexExpression = IndexNameExpressionResolver.resolveDateMathExpression( - index.charAt(0) == '-' ? index.substring(1) : index - ); - if (indexExpression.indexOf(RemoteClusterService.REMOTE_CLUSTER_INDEX_SEPARATOR) < 0) { + if (RemoteClusterAware.isRemoteIndexName(index) == false) { return true; } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java index cfc5b7802d989..0c22a17bb1f6b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java @@ -412,7 +412,7 @@ yield new DataStreamAutoShardingEvent( dataStream.rollover( indexMetadata.getIndex(), newGeneration, - metadata.isTimeSeriesTemplate(templateV2), + metadata.retrieveIndexModeFromTemplate(templateV2), dataStreamAutoShardingEvent ) ); diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java index 1789acc1cb7a6..f04d07fb690c4 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java @@ -87,7 +87,6 @@ final class BulkOperation extends ActionRunnable { private final ConcurrentLinkedQueue failureStoreRedirects = new ConcurrentLinkedQueue<>(); private final long startTimeNanos; private final ClusterStateObserver observer; - private final Map indicesThatCannotBeCreated; private final Executor executor; private final LongSupplier relativeTimeProvider; private final FailureStoreDocumentConverter failureStoreDocumentConverter; @@ -107,7 +106,6 @@ final class BulkOperation extends ActionRunnable { BulkRequest bulkRequest, NodeClient 
client, AtomicArray responses, - Map indicesThatCannotBeCreated, IndexNameExpressionResolver indexNameExpressionResolver, LongSupplier relativeTimeProvider, long startTimeNanos, @@ -122,7 +120,6 @@ final class BulkOperation extends ActionRunnable { bulkRequest, client, responses, - indicesThatCannotBeCreated, indexNameExpressionResolver, relativeTimeProvider, startTimeNanos, @@ -141,7 +138,6 @@ final class BulkOperation extends ActionRunnable { BulkRequest bulkRequest, NodeClient client, AtomicArray responses, - Map indicesThatCannotBeCreated, IndexNameExpressionResolver indexNameExpressionResolver, LongSupplier relativeTimeProvider, long startTimeNanos, @@ -158,7 +154,6 @@ final class BulkOperation extends ActionRunnable { this.bulkRequest = bulkRequest; this.listener = listener; this.startTimeNanos = startTimeNanos; - this.indicesThatCannotBeCreated = indicesThatCannotBeCreated; this.executor = executor; this.relativeTimeProvider = relativeTimeProvider; this.indexNameExpressionResolver = indexNameExpressionResolver; @@ -298,9 +293,6 @@ private Map> groupRequestsByShards( if (addFailureIfRequiresAliasAndAliasIsMissing(docWriteRequest, bulkItemRequest.id(), metadata)) { continue; } - if (addFailureIfIndexCannotBeCreated(docWriteRequest, bulkItemRequest.id())) { - continue; - } if (addFailureIfRequiresDataStreamAndNoParentDataStream(docWriteRequest, bulkItemRequest.id(), metadata)) { continue; } @@ -759,18 +751,6 @@ private boolean addFailureIfIndexIsClosed(DocWriteRequest request, Index conc return false; } - private boolean addFailureIfIndexCannotBeCreated(DocWriteRequest request, int idx) { - IndexNotFoundException cannotCreate = indicesThatCannotBeCreated.get(request.index()); - if (cannotCreate != null) { - var failureStoreStatus = isFailureStoreRequest(request) - ? 
IndexDocFailureStoreStatus.FAILED - : IndexDocFailureStoreStatus.NOT_APPLICABLE_OR_UNKNOWN; - addFailureAndDiscardRequest(request, idx, request.index(), cannotCreate, failureStoreStatus); - return true; - } - return false; - } - private static boolean isFailureStoreRequest(DocWriteRequest request) { return request instanceof IndexRequest ir && ir.isWriteToFailureStore(); } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java index 03768af029141..61adf41a9a276 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java @@ -43,7 +43,6 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.features.FeatureService; -import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexingPressure; import org.elasticsearch.index.VersionType; import org.elasticsearch.indices.SystemIndices; @@ -60,6 +59,7 @@ import java.util.Objects; import java.util.Set; import java.util.SortedMap; +import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.Executor; import java.util.function.Function; import java.util.function.LongSupplier; @@ -351,29 +351,36 @@ protected void createMissingIndicesAndIndexData( final AtomicArray responses = new AtomicArray<>(bulkRequest.requests.size()); // Optimizing when there are no prerequisite actions if (indicesToAutoCreate.isEmpty() && dataStreamsToBeRolledOver.isEmpty() && failureStoresToBeRolledOver.isEmpty()) { - executeBulk(task, bulkRequest, startTimeNanos, listener, executor, responses, Map.of()); + executeBulk(task, bulkRequest, startTimeNanos, listener, executor, responses); return; } - final Map indicesThatCannotBeCreated = new HashMap<>(); + Map indicesExceptions = new ConcurrentHashMap<>(); + Map dataStreamExceptions = new ConcurrentHashMap<>(); + Map failureStoreExceptions = new ConcurrentHashMap<>(); Runnable executeBulkRunnable = () -> executor.execute(new ActionRunnable<>(listener) { @Override protected void doRun() { - executeBulk(task, bulkRequest, startTimeNanos, listener, executor, responses, indicesThatCannotBeCreated); + failRequestsWhenPrerequisiteActionFailed( + indicesExceptions, + dataStreamExceptions, + failureStoreExceptions, + bulkRequest, + responses + ); + executeBulk(task, bulkRequest, startTimeNanos, listener, executor, responses); } }); try (RefCountingRunnable refs = new RefCountingRunnable(executeBulkRunnable)) { - createIndices(bulkRequest, indicesToAutoCreate, indicesThatCannotBeCreated, responses, refs); - rollOverDataStreams(bulkRequest, dataStreamsToBeRolledOver, false, responses, refs); - rollOverDataStreams(bulkRequest, failureStoresToBeRolledOver, true, responses, refs); + createIndices(indicesToAutoCreate, refs, indicesExceptions); + rollOverDataStreams(bulkRequest, dataStreamsToBeRolledOver, false, refs, dataStreamExceptions); + rollOverDataStreams(bulkRequest, failureStoresToBeRolledOver, true, refs, failureStoreExceptions); } } private void createIndices( - BulkRequest bulkRequest, Map indicesToAutoCreate, - Map indicesThatCannotBeCreated, - AtomicArray responses, - RefCountingRunnable refs + RefCountingRunnable refs, + final Map indicesExceptions ) { for (Map.Entry indexEntry : indicesToAutoCreate.entrySet()) { final String index = indexEntry.getKey(); @@ -384,25 +391,26 @@ public 
void onResponse(CreateIndexResponse createIndexResponse) {} @Override public void onFailure(Exception e) { final Throwable cause = ExceptionsHelper.unwrapCause(e); - if (cause instanceof IndexNotFoundException indexNotFoundException) { - synchronized (indicesThatCannotBeCreated) { - indicesThatCannotBeCreated.put(index, indexNotFoundException); - } - } else if ((cause instanceof ResourceAlreadyExistsException) == false) { + if ((cause instanceof ResourceAlreadyExistsException) == false) { // fail all requests involving this index, if create didn't work - failRequestsWhenPrerequisiteActionFailed(index, bulkRequest, responses, e); + indicesExceptions.put(index, e); } } }, refs.acquire())); } } + // Separate method to allow for overriding in tests. + void createIndex(CreateIndexRequest createIndexRequest, ActionListener listener) { + client.execute(AutoCreateAction.INSTANCE, createIndexRequest, listener); + } + private void rollOverDataStreams( BulkRequest bulkRequest, Set dataStreamsToBeRolledOver, boolean targetFailureStore, - AtomicArray responses, - RefCountingRunnable refs + RefCountingRunnable refs, + Map dataStreamExceptions ) { for (String dataStream : dataStreamsToBeRolledOver) { RolloverRequest rolloverRequest = new RolloverRequest(dataStream, null); @@ -416,7 +424,7 @@ private void rollOverDataStreams( } // We are executing a lazy rollover because it is an action specialised for this situation, when we want an // unconditional and performant rollover. - rolloverClient.execute(LazyRolloverAction.INSTANCE, rolloverRequest, ActionListener.releaseAfter(new ActionListener<>() { + rollOver(rolloverRequest, ActionListener.releaseAfter(new ActionListener<>() { @Override public void onResponse(RolloverResponse result) { @@ -431,26 +439,52 @@ public void onResponse(RolloverResponse result) { @Override public void onFailure(Exception e) { - failRequestsWhenPrerequisiteActionFailed(dataStream, bulkRequest, responses, e); + dataStreamExceptions.put(dataStream, e); } }, refs.acquire())); } } + // Separate method to allow for overriding in tests. + void rollOver(RolloverRequest rolloverRequest, ActionListener listener) { + rolloverClient.execute(LazyRolloverAction.INSTANCE, rolloverRequest, listener); + } + /** - * Fails all requests involving this index or data stream because the prerequisite action failed too. + * Mark all the requests for which the prerequisite action failed (i.e. index creation or data stream/failure store rollover) as failed. 
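The two `// Separate method to allow for overriding in tests.` seams introduced above (`createIndex` and `rollOver`) follow a common pattern: route the side effect through a small instance method so a test subclass can stub it without touching the surrounding orchestration. In miniature, with invented names:

```java
import java.util.function.Consumer;

public class TestSeamSketch {
    static class Service {
        // The seam: production code calls the real client here.
        void createIndex(String name, Consumer<String> listener) {
            listener.accept("created " + name);
        }

        void run() {
            createIndex("logs", System.out::println);
        }
    }

    public static void main(String[] args) {
        // A test overrides only the seam; the rest of the logic is exercised unchanged.
        new Service() {
            @Override
            void createIndex(String name, Consumer<String> listener) {
                listener.accept("stubbed " + name);
            }
        }.run(); // prints: stubbed logs
    }
}
```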
*/ - private static void failRequestsWhenPrerequisiteActionFailed( - String target, + private void failRequestsWhenPrerequisiteActionFailed( + Map indicesExceptions, + Map dataStreamExceptions, + Map failureStoreExceptions, BulkRequest bulkRequest, - AtomicArray responses, - Exception error + AtomicArray responses ) { + if (indicesExceptions.isEmpty() && dataStreamExceptions.isEmpty() && failureStoreExceptions.isEmpty()) { + return; + } for (int i = 0; i < bulkRequest.requests.size(); i++) { DocWriteRequest request = bulkRequest.requests.get(i); - if (request != null && setResponseFailureIfIndexMatches(responses, i, request, target, error)) { - bulkRequest.requests.set(i, null); + if (request == null) { + continue; } + var exception = indicesExceptions.get(request.index()); + if (exception == null) { + if (request instanceof IndexRequest indexRequest && indexRequest.isWriteToFailureStore()) { + exception = failureStoreExceptions.get(request.index()); + } else { + exception = dataStreamExceptions.get(request.index()); + } + } + if (exception == null) { + continue; + } + var failureStoreStatus = request instanceof IndexRequest ir && ir.isWriteToFailureStore() + ? IndexDocFailureStoreStatus.FAILED + : IndexDocFailureStoreStatus.NOT_APPLICABLE_OR_UNKNOWN; + var failure = new BulkItemResponse.Failure(request.index(), request.id(), exception, failureStoreStatus); + responses.set(i, BulkItemResponse.failure(i, request.opType(), failure)); + bulkRequest.requests.set(i, null); } } @@ -532,33 +566,13 @@ private static boolean isSystemIndex(SortedMap indices } } - void createIndex(CreateIndexRequest createIndexRequest, ActionListener listener) { - client.execute(AutoCreateAction.INSTANCE, createIndexRequest, listener); - } - - private static boolean setResponseFailureIfIndexMatches( - AtomicArray responses, - int idx, - DocWriteRequest request, - String index, - Exception e - ) { - if (index.equals(request.index())) { - BulkItemResponse.Failure failure = new BulkItemResponse.Failure(request.index(), request.id(), e); - responses.set(idx, BulkItemResponse.failure(idx, request.opType(), failure)); - return true; - } - return false; - } - void executeBulk( Task task, BulkRequest bulkRequest, long startTimeNanos, ActionListener listener, Executor executor, - AtomicArray responses, - Map indicesThatCannotBeCreated + AtomicArray responses ) { new BulkOperation( task, @@ -568,7 +582,6 @@ void executeBulk( bulkRequest, client, responses, - indicesThatCannotBeCreated, indexNameExpressionResolver, relativeTimeNanosProvider, startTimeNanos, diff --git a/server/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java b/server/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java index 42957e7c932d1..9f40584733250 100644 --- a/server/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java +++ b/server/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java @@ -12,32 +12,25 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ShardOperationFailedException; -import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; -import org.elasticsearch.index.Index; -import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchException; import org.elasticsearch.search.SearchShardTarget; 
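With those maps in place, the bulk path no longer special-cases `IndexNotFoundException`: every prerequisite failure (index creation, data-stream rollover, or failure-store rollover) is recorded in a `ConcurrentHashMap` keyed by target name, and one pass over the bulk marks the matching requests failed and nulls them out before execution. A condensed sketch of that pass, with simplified stand-in types (`Req`, string messages) instead of the real request and response classes:

```java
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class PrerequisiteFailureSketch {
    record Req(String index, boolean toFailureStore) {}

    // One pass over the bulk: record a failure and null out every request whose
    // prerequisite (index creation, or the matching rollover) failed.
    static Map<Integer, String> failRequests(
        List<Req> requests,
        Map<String, Exception> indexFailures,
        Map<String, Exception> dataStreamFailures,
        Map<String, Exception> failureStoreFailures
    ) {
        Map<Integer, String> failures = new HashMap<>();
        for (int i = 0; i < requests.size(); i++) {
            Req r = requests.get(i);
            if (r == null) continue;
            Exception e = indexFailures.get(r.index());
            if (e == null) {
                // Failure-store writes consult the failure-store rollover result instead.
                e = r.toFailureStore() ? failureStoreFailures.get(r.index()) : dataStreamFailures.get(r.index());
            }
            if (e != null) {
                failures.put(i, e.getMessage());
                requests.set(i, null); // drop it from the remaining bulk execution
            }
        }
        return failures;
    }

    public static void main(String[] args) {
        Map<String, Exception> idx = new ConcurrentHashMap<>();
        idx.put("logs", new IllegalStateException("create failed"));
        List<Req> reqs = new ArrayList<>(List.of(new Req("logs", false), new Req("metrics", false)));
        System.out.println(failRequests(reqs, idx, Map.of(), Map.of())); // {0=create failed}
    }
}
```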
-import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; - /** * Represents a failure to search on a specific shard. */ public class ShardSearchFailure extends ShardOperationFailedException { - private static final String REASON_FIELD = "reason"; - private static final String NODE_FIELD = "node"; - private static final String INDEX_FIELD = "index"; - private static final String SHARD_FIELD = "shard"; + public static final String REASON_FIELD = "reason"; + public static final String NODE_FIELD = "node"; + public static final String INDEX_FIELD = "index"; + public static final String SHARD_FIELD = "shard"; public static final ShardSearchFailure[] EMPTY_ARRAY = new ShardSearchFailure[0]; @@ -125,51 +118,4 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - public static ShardSearchFailure fromXContent(XContentParser parser) throws IOException { - XContentParser.Token token; - ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); - String currentFieldName = null; - int shardId = -1; - String indexName = null; - String clusterAlias = null; - String nodeId = null; - ElasticsearchException exception = null; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (token.isValue()) { - if (SHARD_FIELD.equals(currentFieldName)) { - shardId = parser.intValue(); - } else if (INDEX_FIELD.equals(currentFieldName)) { - indexName = parser.text(); - int indexOf = indexName.indexOf(RemoteClusterAware.REMOTE_CLUSTER_INDEX_SEPARATOR); - if (indexOf > 0) { - clusterAlias = indexName.substring(0, indexOf); - indexName = indexName.substring(indexOf + 1); - } - } else if (NODE_FIELD.equals(currentFieldName)) { - nodeId = parser.text(); - } else { - parser.skipChildren(); - } - } else if (token == XContentParser.Token.START_OBJECT) { - if (REASON_FIELD.equals(currentFieldName)) { - exception = ElasticsearchException.fromXContent(parser); - } else { - parser.skipChildren(); - } - } else { - parser.skipChildren(); - } - } - SearchShardTarget searchShardTarget = null; - if (nodeId != null) { - searchShardTarget = new SearchShardTarget( - nodeId, - new ShardId(new Index(indexName, IndexMetadata.INDEX_UUID_NA_VALUE), shardId), - clusterAlias - ); - } - return new ShardSearchFailure(exception, searchShardTarget); - } } diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java index 4ed8feb098ad2..4e3544f0170cb 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java @@ -110,15 +110,9 @@ private static SearchContextIdForNode readSearchContextIdForNodeExcludingContext private static SearchContextIdForNode innerReadSearchContextIdForNode(String contextUUID, StreamInput in) throws IOException { long id = in.readLong(); - String target = in.readString(); - String clusterAlias; - final int index = target.indexOf(RemoteClusterAware.REMOTE_CLUSTER_INDEX_SEPARATOR); - if (index == -1) { - clusterAlias = null; - } else { - clusterAlias = target.substring(0, index); - target 
= target.substring(index + 1); - } + String[] split = RemoteClusterAware.splitIndexName(in.readString()); + String clusterAlias = split[0]; + String target = split[1]; return new SearchContextIdForNode(clusterAlias, target, new ShardSearchContextId(contextUUID, id)); } diff --git a/server/src/main/java/org/elasticsearch/client/internal/ParentTaskAssigningClient.java b/server/src/main/java/org/elasticsearch/client/internal/ParentTaskAssigningClient.java index 8e90a459bcafd..014fef8bdfe56 100644 --- a/server/src/main/java/org/elasticsearch/client/internal/ParentTaskAssigningClient.java +++ b/server/src/main/java/org/elasticsearch/client/internal/ParentTaskAssigningClient.java @@ -18,6 +18,7 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.transport.RemoteClusterService; +import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportResponse; import java.util.concurrent.Executor; @@ -75,12 +76,18 @@ public RemoteClusterClient getRemoteClusterClient( return new RemoteClusterClient() { @Override public void execute( + Transport.Connection connection, RemoteClusterActionType action, Request request, ActionListener listener ) { request.setParentTask(parentTask); - delegate.execute(action, request, listener); + delegate.execute(connection, action, request, listener); + } + + @Override + public void getConnection(Request request, ActionListener listener) { + delegate.getConnection(request, listener); } }; } diff --git a/server/src/main/java/org/elasticsearch/client/internal/RemoteClusterClient.java b/server/src/main/java/org/elasticsearch/client/internal/RemoteClusterClient.java index 56ab07d26b4c7..9e3497601fb57 100644 --- a/server/src/main/java/org/elasticsearch/client/internal/RemoteClusterClient.java +++ b/server/src/main/java/org/elasticsearch/client/internal/RemoteClusterClient.java @@ -13,6 +13,8 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.RemoteClusterActionType; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportResponse; /** @@ -21,16 +23,31 @@ public interface RemoteClusterClient { /** * Executes an action, denoted by an {@link ActionType}, on the remote cluster. - * - * @param action The action type to execute. - * @param request The action request. - * @param listener A listener for the response - * @param The request type. - * @param the response type. + */ + default void execute( + RemoteClusterActionType action, + Request request, + ActionListener listener + ) { + getConnection( + request, + listener.delegateFailureAndWrap((responseListener, connection) -> execute(connection, action, request, responseListener)) + ); + } + + /** + * Executes an action, denoted by an {@link ActionType}, using a connection to the remote cluster obtained using {@link #getConnection}. */ void execute( + Transport.Connection connection, RemoteClusterActionType action, Request request, ActionListener listener ); + + /** + * Obtain a connection to the remote cluster for use with the {@link #execute} override that allows to specify the connection. Useful + * for cases where you need to inspect {@link Transport.Connection#getVersion} before deciding the exact remote action to invoke. 
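`RemoteClusterAware.splitIndexName` (used in `TransportSearchHelper` above) and the `isRemoteIndexName` check adopted earlier share one parsing rule: date-math expressions, which start with `<` (or `-<` for exclusions), are never remote even when they contain a `:`, and otherwise a `:` after at least one character separates the cluster alias from the index name. A sketch of that contract; the exact guard details are an assumption of this sketch:

```java
import java.util.Arrays;

public class SplitIndexNameSketch {
    // Element 0 is the cluster alias (null for local), element 1 the index name.
    static String[] splitIndexName(String expression) {
        if (expression.isEmpty() || expression.charAt(0) == '<' || expression.startsWith("-<")) {
            return new String[] { null, expression }; // date math is always local
        }
        int i = expression.indexOf(':');
        if (i <= 0) {
            return new String[] { null, expression };
        }
        return new String[] { expression.substring(0, i), expression.substring(i + 1) };
    }

    public static void main(String[] args) {
        System.out.println(Arrays.toString(splitIndexName("remote1:logs")));           // [remote1, logs]
        System.out.println(Arrays.toString(splitIndexName("logs")));                   // [null, logs]
        System.out.println(Arrays.toString(splitIndexName("<logs-{now/d{HH:mm}}>"))); // [null, <logs-{now/d{HH:mm}}>]
    }
}
```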
+ */ + void getConnection(@Nullable Request request, ActionListener listener); } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java index 5634d40993b4f..78902f5e27c90 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java @@ -448,43 +448,52 @@ public DataStreamIndices getDataStreamIndices(boolean failureStore) { * Performs a rollover on a {@code DataStream} instance and returns a new instance containing * the updated list of backing indices and incremented generation. * - * @param writeIndex new write index - * @param generation new generation - * @param timeSeries whether the template that created this data stream is in time series mode - * @param autoShardingEvent the auto sharding event this rollover operation is applying - * + * @param writeIndex new write index + * @param generation new generation + * @param indexModeFromTemplate the index mode that originates from the template that created this data stream + * @param autoShardingEvent the auto sharding event this rollover operation is applying * @return new {@code DataStream} instance with the rollover operation applied */ public DataStream rollover( Index writeIndex, long generation, - boolean timeSeries, + IndexMode indexModeFromTemplate, @Nullable DataStreamAutoShardingEvent autoShardingEvent ) { ensureNotReplicated(); - return unsafeRollover(writeIndex, generation, timeSeries, autoShardingEvent); + return unsafeRollover(writeIndex, generation, indexModeFromTemplate, autoShardingEvent); } /** - * Like {@link #rollover(Index, long, boolean, DataStreamAutoShardingEvent)}, but does no validation, use with care only. + * Like {@link #rollover(Index, long, IndexMode, DataStreamAutoShardingEvent)}, but does no validation, use with care only. 
*/ - public DataStream unsafeRollover(Index writeIndex, long generation, boolean timeSeries, DataStreamAutoShardingEvent autoShardingEvent) { - IndexMode indexMode = this.indexMode; - if ((indexMode == null || indexMode == IndexMode.STANDARD) && timeSeries) { + public DataStream unsafeRollover( + Index writeIndex, + long generation, + IndexMode indexModeFromTemplate, + DataStreamAutoShardingEvent autoShardingEvent + ) { + IndexMode dsIndexMode = this.indexMode; + if ((dsIndexMode == null || dsIndexMode == IndexMode.STANDARD) && indexModeFromTemplate == IndexMode.TIME_SERIES) { // This allows for migrating a data stream to be a tsdb data stream: // (only if index_mode=null|standard then allow it to be set to time_series) - indexMode = IndexMode.TIME_SERIES; - } else if (indexMode == IndexMode.TIME_SERIES && timeSeries == false) { + dsIndexMode = IndexMode.TIME_SERIES; + } else if (dsIndexMode == IndexMode.TIME_SERIES && (indexModeFromTemplate == null || indexModeFromTemplate == IndexMode.STANDARD)) { + // Allow downgrading a time series data stream to a regular data stream + dsIndexMode = null; + } else if ((dsIndexMode == null || dsIndexMode == IndexMode.STANDARD) && indexModeFromTemplate == IndexMode.LOGSDB) { + dsIndexMode = IndexMode.LOGSDB; + } else if (dsIndexMode == IndexMode.LOGSDB && (indexModeFromTemplate == null || indexModeFromTemplate == IndexMode.STANDARD)) { // Allow downgrading a time series data stream to a regular data stream - indexMode = null; + dsIndexMode = null; } List backingIndices = new ArrayList<>(this.backingIndices.indices); backingIndices.add(writeIndex); return copy().setBackingIndices( this.backingIndices.copy().setIndices(backingIndices).setAutoShardingEvent(autoShardingEvent).setRolloverOnWrite(false).build() - ).setGeneration(generation).setIndexMode(indexMode).build(); + ).setGeneration(generation).setIndexMode(dsIndexMode).build(); } /** diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java index 6e865db0ebb39..2229166a2d779 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java @@ -37,6 +37,7 @@ import org.elasticsearch.indices.InvalidIndexNameException; import org.elasticsearch.indices.SystemIndices; import org.elasticsearch.indices.SystemIndices.SystemIndexAccessLevel; +import org.elasticsearch.transport.RemoteClusterAware; import java.time.Instant; import java.time.ZoneId; @@ -1753,7 +1754,7 @@ private static void ensureRemoteIndicesRequireIgnoreUnavailable(IndicesOptions o return; } for (String index : indexExpressions) { - if (index.contains(":")) { + if (RemoteClusterAware.isRemoteIndexName(index)) { failOnRemoteIndicesNotIgnoringUnavailable(indexExpressions); } } @@ -1762,7 +1763,7 @@ private static void ensureRemoteIndicesRequireIgnoreUnavailable(IndicesOptions o private static void failOnRemoteIndicesNotIgnoringUnavailable(List indexExpressions) { List crossClusterIndices = new ArrayList<>(); for (String index : indexExpressions) { - if (index.contains(":")) { + if (RemoteClusterAware.isRemoteIndexName(index)) { crossClusterIndices.add(index); } } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java index d2f5ab5eabaee..7991e5a641850 100644 --- 
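The `unsafeRollover` change above generalizes the old boolean `timeSeries` flag into `IndexMode` transitions: a standard (or unset) data stream may upgrade to `time_series`, and now also to `logsdb`, when the template asks for it, and either mode downgrades back to standard when the template no longer does; every other combination leaves the mode alone. The transition table, condensed into a standalone sketch with a local `IndexMode` enum:

```java
public class RolloverIndexModeSketch {
    enum IndexMode { STANDARD, TIME_SERIES, LOGSDB }

    // null means "standard/unset" on both sides, matching the code above.
    static IndexMode next(IndexMode current, IndexMode fromTemplate) {
        boolean standard = current == null || current == IndexMode.STANDARD;
        boolean templateStandard = fromTemplate == null || fromTemplate == IndexMode.STANDARD;
        if (standard && fromTemplate == IndexMode.TIME_SERIES) return IndexMode.TIME_SERIES; // upgrade
        if (current == IndexMode.TIME_SERIES && templateStandard) return null;               // downgrade
        if (standard && fromTemplate == IndexMode.LOGSDB) return IndexMode.LOGSDB;           // upgrade
        if (current == IndexMode.LOGSDB && templateStandard) return null;                    // downgrade
        return current; // no transition
    }

    public static void main(String[] args) {
        System.out.println(next(null, IndexMode.TIME_SERIES)); // TIME_SERIES
        System.out.println(next(IndexMode.TIME_SERIES, null)); // null (back to standard)
        System.out.println(next(null, IndexMode.LOGSDB));      // LOGSDB
    }
}
```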
a/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java @@ -1305,16 +1305,10 @@ public Map templatesV2() { .orElse(Collections.emptyMap()); } + // TODO: remove this method: public boolean isTimeSeriesTemplate(ComposableIndexTemplate indexTemplate) { - if (indexTemplate.getDataStreamTemplate() == null) { - return false; - } - - var settings = MetadataIndexTemplateService.resolveSettings(indexTemplate, componentTemplates()); - // Not using IndexSettings.MODE.get() to avoid validation that may fail at this point. - var rawIndexMode = settings.get(IndexSettings.MODE.getKey()); - var indexMode = rawIndexMode != null ? Enum.valueOf(IndexMode.class, rawIndexMode.toUpperCase(Locale.ROOT)) : null; - if (indexMode == IndexMode.TIME_SERIES) { + var indexModeFromTemplate = retrieveIndexModeFromTemplate(indexTemplate); + if (indexModeFromTemplate == IndexMode.TIME_SERIES) { // No need to check for the existence of index.routing_path here, because index.mode=time_series can't be specified without it. // Setting validation takes care of this. // Also no need to validate that the fields defined in index.routing_path are keyword fields with time_series_dimension @@ -1328,6 +1322,17 @@ public boolean isTimeSeriesTemplate(ComposableIndexTemplate indexTemplate) { return false; } + public IndexMode retrieveIndexModeFromTemplate(ComposableIndexTemplate indexTemplate) { + if (indexTemplate.getDataStreamTemplate() == null) { + return null; + } + + var settings = MetadataIndexTemplateService.resolveSettings(indexTemplate, componentTemplates()); + // Not using IndexSettings.MODE.get() to avoid validation that may fail at this point. + var rawIndexMode = settings.get(IndexSettings.MODE.getKey()); + return rawIndexMode != null ? Enum.valueOf(IndexMode.class, rawIndexMode.toUpperCase(Locale.ROOT)) : null; + } + public Map dataStreams() { return this.custom(DataStreamMetadata.TYPE, DataStreamMetadata.EMPTY).dataStreams(); } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java index 69f753233b418..80e6483bb086d 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java @@ -313,7 +313,7 @@ static ClusterState createDataStream( .collect(Collectors.toCollection(ArrayList::new)); dsBackingIndices.add(writeIndex.getIndex()); boolean hidden = isSystem || template.getDataStreamTemplate().isHidden(); - final IndexMode indexMode = metadata.isTimeSeriesTemplate(template) ? IndexMode.TIME_SERIES : null; + final IndexMode indexMode = metadata.retrieveIndexModeFromTemplate(template); final DataStreamLifecycle lifecycle = isSystem ? 
MetadataIndexTemplateService.resolveLifecycle(template, systemDataStreamDescriptor.getComponentTemplates()) : MetadataIndexTemplateService.resolveLifecycle(template, metadata.componentTemplates()); diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java index 0b39de4d0afef..c55ad5570e038 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java @@ -115,12 +115,12 @@ public DesiredBalanceReconciler(ClusterSettings clusterSettings, ThreadPool thre undesiredAllocations = LongGaugeMetric.create( meterRegistry, "es.allocator.desired_balance.allocations.undesired.current", - "Total number of shards allocated on undesired nodes", + "Total number of shards allocated on undesired nodes excluding shutting down nodes", "{shard}" ); undesiredAllocationsRatio = meterRegistry.registerDoubleGauge( "es.allocator.desired_balance.allocations.undesired.ratio", - "Ratio of undesired allocations to shard count", + "Ratio of undesired allocations to shard count excluding shutting down nodes", "1", () -> { var total = totalAllocations.get(); @@ -500,7 +500,7 @@ private void balance() { int unassignedShards = routingNodes.unassigned().size() + routingNodes.unassigned().ignored().size(); int totalAllocations = 0; - int undesiredAllocations = 0; + int undesiredAllocationsExcludingShuttingDownNodes = 0; // Iterate over all started shards and try to move any which are on undesired nodes. In the presence of throttling shard // movements, the goal of this iteration order is to achieve a fairer movement of shards from the nodes that are offloading the @@ -526,7 +526,9 @@ private void balance() { continue; } - undesiredAllocations++; + if (allocation.metadata().nodeShutdowns().contains(shardRouting.currentNodeId()) == false) { + undesiredAllocationsExcludingShuttingDownNodes++; + } if (allocation.deciders().canRebalance(shardRouting, allocation).type() != Decision.Type.YES) { // rebalancing disabled for this shard @@ -560,23 +562,23 @@ private void balance() { } DesiredBalanceReconciler.this.unassignedShards.set(unassignedShards); - DesiredBalanceReconciler.this.undesiredAllocations.set(undesiredAllocations); + DesiredBalanceReconciler.this.undesiredAllocations.set(undesiredAllocationsExcludingShuttingDownNodes); DesiredBalanceReconciler.this.totalAllocations.set(totalAllocations); - maybeLogUndesiredAllocationsWarning(totalAllocations, undesiredAllocations, routingNodes.size()); + maybeLogUndesiredAllocationsWarning(totalAllocations, undesiredAllocationsExcludingShuttingDownNodes, routingNodes.size()); } - private void maybeLogUndesiredAllocationsWarning(int allAllocations, int undesiredAllocations, int nodeCount) { + private void maybeLogUndesiredAllocationsWarning(int totalAllocations, int undesiredAllocations, int nodeCount) { // more shards than cluster can relocate with one reroute final boolean nonEmptyRelocationBacklog = undesiredAllocations > 2L * nodeCount; - final boolean warningThresholdReached = undesiredAllocations > undesiredAllocationsLogThreshold * allAllocations; - if (allAllocations > 0 && nonEmptyRelocationBacklog && warningThresholdReached) { + final boolean warningThresholdReached = undesiredAllocations > undesiredAllocationsLogThreshold * totalAllocations; + if 
(totalAllocations > 0 && nonEmptyRelocationBacklog && warningThresholdReached) { undesiredAllocationLogInterval.maybeExecute( () -> logger.warn( "[{}] of assigned shards ({}/{}) are not on their desired nodes, which exceeds the warn threshold of [{}]", - Strings.format1Decimals(100.0 * undesiredAllocations / allAllocations, "%"), + Strings.format1Decimals(100.0 * undesiredAllocations / totalAllocations, "%"), undesiredAllocations, - allAllocations, + totalAllocations, Strings.format1Decimals(100.0 * undesiredAllocationsLogThreshold, "%") ) ); diff --git a/server/src/main/java/org/elasticsearch/features/FeatureData.java b/server/src/main/java/org/elasticsearch/features/FeatureData.java index f2fdac937fc96..991bb4d82be3d 100644 --- a/server/src/main/java/org/elasticsearch/features/FeatureData.java +++ b/server/src/main/java/org/elasticsearch/features/FeatureData.java @@ -11,6 +11,8 @@ import org.elasticsearch.Version; import org.elasticsearch.common.Strings; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; import java.util.Collections; import java.util.HashMap; @@ -28,6 +30,16 @@ * features for the consumption of {@link FeatureService} */ public class FeatureData { + + private static final Logger Log = LogManager.getLogger(FeatureData.class); + private static final boolean INCLUDE_TEST_FEATURES = System.getProperty("tests.testfeatures.enabled", "").equals("true"); + + static { + if (INCLUDE_TEST_FEATURES) { + Log.warn("WARNING: Test features are enabled. This should ONLY be used in automated tests."); + } + } + private final NavigableMap> historicalFeatures; private final Map nodeFeatures; @@ -43,7 +55,11 @@ public static FeatureData createFromSpecifications(List> historicalFeatures = new TreeMap<>(Map.of(Version.V_EMPTY, Set.of())); Map nodeFeatures = new HashMap<>(); for (FeatureSpecification spec : specs) { - var specFeatures = spec.getFeatures(); + Set specFeatures = spec.getFeatures(); + if (INCLUDE_TEST_FEATURES) { + specFeatures = new HashSet<>(specFeatures); + specFeatures.addAll(spec.getTestFeatures()); + } for (var hfe : spec.getHistoricalFeatures().entrySet()) { FeatureSpecification existing = allFeatures.putIfAbsent(hfe.getKey().id(), spec); diff --git a/server/src/main/java/org/elasticsearch/features/FeatureInfrastructureFeatures.java b/server/src/main/java/org/elasticsearch/features/FeatureInfrastructureFeatures.java index 53eaef369778f..76afb5eba8a47 100644 --- a/server/src/main/java/org/elasticsearch/features/FeatureInfrastructureFeatures.java +++ b/server/src/main/java/org/elasticsearch/features/FeatureInfrastructureFeatures.java @@ -24,4 +24,9 @@ public class FeatureInfrastructureFeatures implements FeatureSpecification { public Set getFeatures() { return Set.of(FeatureService.FEATURES_SUPPORTED); } + + @Override + public Set getTestFeatures() { + return Set.of(FeatureService.TEST_FEATURES_ENABLED); + } } diff --git a/server/src/main/java/org/elasticsearch/features/FeatureService.java b/server/src/main/java/org/elasticsearch/features/FeatureService.java index 250a4541b0869..1d911a75a4838 100644 --- a/server/src/main/java/org/elasticsearch/features/FeatureService.java +++ b/server/src/main/java/org/elasticsearch/features/FeatureService.java @@ -30,6 +30,7 @@ public class FeatureService { * A feature indicating that node features are supported. 
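The `FeatureData` change above merges `getTestFeatures()` into the node's feature set only when the JVM runs with `-Dtests.testfeatures.enabled=true`, which is how `FeatureInfrastructureFeatures` ends up advertising `test_features_enabled` (the feature asserted in `ClusterFeaturesIT` earlier). The gating idea in miniature, with stand-in names rather than the real classes:

```java
import java.util.HashSet;
import java.util.Set;

public class TestFeatureGateSketch {
    // Read once at class load, like the INCLUDE_TEST_FEATURES flag above.
    private static final boolean INCLUDE_TEST_FEATURES =
        "true".equals(System.getProperty("tests.testfeatures.enabled", ""));

    static Set<String> effectiveFeatures(Set<String> features, Set<String> testFeatures) {
        if (INCLUDE_TEST_FEATURES == false) {
            return features; // production path: test features stay invisible
        }
        Set<String> merged = new HashSet<>(features);
        merged.addAll(testFeatures);
        return merged;
    }

    public static void main(String[] args) {
        System.out.println(effectiveFeatures(Set.of("features_supported"), Set.of("test_features_enabled")));
    }
}
```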
*/ public static final NodeFeature FEATURES_SUPPORTED = new NodeFeature("features_supported"); + public static final NodeFeature TEST_FEATURES_ENABLED = new NodeFeature("test_features_enabled"); private static final Logger logger = LogManager.getLogger(FeatureService.class); diff --git a/server/src/main/java/org/elasticsearch/features/FeatureSpecification.java b/server/src/main/java/org/elasticsearch/features/FeatureSpecification.java index db69ef00756b8..03f0dd89f172e 100644 --- a/server/src/main/java/org/elasticsearch/features/FeatureSpecification.java +++ b/server/src/main/java/org/elasticsearch/features/FeatureSpecification.java @@ -40,6 +40,16 @@ default Set getFeatures() { return Set.of(); } + /** + * Returns a set of test features that this node supports. + *
<p> + *
+ * These features will only be exposed if the {@code tests.testfeatures.enabled} system property is set. + * This should only be used when deploying test clusters. + */ + default Set getTestFeatures() { + return Set.of(); + } + /** * Returns information on historical features that should be deemed to be present on all nodes * on or above the {@link Version} specified. diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index ebe9f27f461cf..7f9b59d427656 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -389,6 +389,14 @@ static Mapping createDynamicUpdate(DocumentParserContext context) { rootBuilder.addRuntimeField(runtimeField); } RootObjectMapper root = rootBuilder.build(MapperBuilderContext.root(context.mappingLookup().isSourceSynthetic(), false)); + + // Repeat the check, in case the dynamic mappers don't produce a mapping update. + // For instance, the parsed source may contain intermediate objects that get flattened, + // leading to an empty dynamic update. + if (root.mappers.isEmpty() && root.runtimeFields().isEmpty()) { + return null; + } + return context.mappingLookup().getMapping().mappingUpdate(root); } @@ -638,7 +646,7 @@ private static void parseObject(final DocumentParserContext context, String curr private static void doParseObject(DocumentParserContext context, String currentFieldName, Mapper objectMapper) throws IOException { context.path().add(currentFieldName); boolean withinLeafObject = context.path().isWithinLeafObject(); - if (objectMapper instanceof ObjectMapper objMapper && objMapper.subobjects() != ObjectMapper.Subobjects.ENABLED) { + if (objectMapper instanceof ObjectMapper objMapper && objMapper.subobjects() == ObjectMapper.Subobjects.DISABLED) { context.path().setWithinLeafObject(true); } parseObjectOrField(context, objectMapper); @@ -1012,11 +1020,15 @@ private static Mapper getLeafMapper(final DocumentParserContext context, String // don't create a dynamic mapping for it and don't index it. String fieldPath = context.path().pathAsText(fieldName); MappedFieldType fieldType = context.mappingLookup().getFieldType(fieldPath); - if (fieldType != null) { - // we haven't found a mapper with this name above, which means if a field type is found it is for sure a runtime field. - assert fieldType.hasDocValues() == false && fieldType.isAggregatable() && fieldType.isSearchable(); + + if (fieldType != null && fieldType.hasDocValues() == false && fieldType.isAggregatable() && fieldType.isSearchable()) { + // We haven't found a mapper with this name above, which means it is a runtime field. return noopFieldMapper(fieldPath); } + // No match or the matching field type corresponds to a mapper with flattened name (containing dots), + // e.g. for field 'foo.bar' under root there is no 'bar' mapper in object 'bar'. + // Returning null leads to creating a dynamic mapper. In the case of a mapper with flattened name, + // the dynamic mapper later gets deduplicated when building the dynamic update for the doc at hand. 
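The `DocumentParser` changes above, and the `DotExpandingXContentParser` plumbing that follows, all revolve around how a dotted name like `foo.bar.baz` is expanded into intermediate objects during parsing, versus kept flat when an enclosing object disables subobjects (the new auto-flattening support). A toy illustration of the two shapes using plain maps, nothing Elasticsearch-specific:

```java
import java.util.HashMap;
import java.util.Map;

public class DotExpansionSketch {
    // Expand "foo.bar.baz" into nested objects, the way dot expansion does
    // when subobjects are enabled.
    static Map<String, Object> expand(String dottedKey, Object value) {
        String[] parts = dottedKey.split("\\.");
        Map<String, Object> root = new HashMap<>();
        Map<String, Object> current = root;
        for (int i = 0; i < parts.length - 1; i++) {
            Map<String, Object> child = new HashMap<>();
            current.put(parts[i], child);
            current = child;
        }
        current.put(parts[parts.length - 1], value);
        return root;
    }

    public static void main(String[] args) {
        System.out.println(expand("foo.bar.baz", 1)); // {foo={bar={baz=1}}}
        System.out.println(Map.of("foo.bar.baz", 1)); // the flat form kept when subobjects are disabled
    }
}
```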
return null; } @@ -1160,11 +1172,10 @@ private static class RootDocumentParserContext extends DocumentParserContext { mappingLookup.getMapping().getRoot(), ObjectMapper.Dynamic.getRootDynamic(mappingLookup) ); - if (mappingLookup.getMapping().getRoot().subobjects() == ObjectMapper.Subobjects.ENABLED) { - this.parser = DotExpandingXContentParser.expandDots(parser, this.path); - } else { - this.parser = parser; - } + // If root supports no subobjects, there's no point in expanding dots in names to subobjects. + this.parser = (mappingLookup.getMapping().getRoot().subobjects() == ObjectMapper.Subobjects.DISABLED) + ? parser + : DotExpandingXContentParser.expandDots(parser, this.path, this); this.document = new LuceneDocument(); this.documents.add(document); this.maxAllowedNumNestedDocs = indexSettings().getMappingNestedDocsLimit(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java index c2970d8716147..b8acdb716b467 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java @@ -123,6 +123,7 @@ public int get() { private Field version; private final SeqNoFieldMapper.SequenceIDFields seqID; private final Set fieldsAppliedFromTemplates; + private final boolean supportsObjectAutoFlattening; /** * Fields that are copied from values of other fields via copy_to. @@ -177,6 +178,7 @@ private DocumentParserContext( this.copyToFields = copyToFields; this.dynamicMappersSize = dynamicMapperSize; this.recordedSource = recordedSource; + this.supportsObjectAutoFlattening = checkForAutoFlatteningSupport(); } private DocumentParserContext(ObjectMapper parent, ObjectMapper.Dynamic dynamic, DocumentParserContext in) { @@ -204,6 +206,43 @@ private DocumentParserContext(ObjectMapper parent, ObjectMapper.Dynamic dynamic, ); } + private boolean checkForAutoFlatteningSupport() { + if (root().subobjects() != ObjectMapper.Subobjects.ENABLED) { + return true; + } + for (ObjectMapper objectMapper : mappingLookup.objectMappers().values()) { + if (objectMapper.subobjects() != ObjectMapper.Subobjects.ENABLED) { + return true; + } + } + if (root().dynamicTemplates() != null) { + for (DynamicTemplate dynamicTemplate : root().dynamicTemplates()) { + if (findSubobjects(dynamicTemplate.getMapping())) { + return true; + } + } + } + for (ObjectMapper objectMapper : dynamicObjectMappers.values()) { + if (objectMapper.subobjects() != ObjectMapper.Subobjects.ENABLED) { + return true; + } + } + return false; + } + + @SuppressWarnings("unchecked") + private static boolean findSubobjects(Map mapping) { + for (var entry : mapping.entrySet()) { + if (entry.getKey().equals("subobjects") && (entry.getValue() instanceof Boolean || entry.getValue() instanceof String)) { + return true; + } + if (entry.getValue() instanceof Map && findSubobjects((Map) entry.getValue())) { + return true; + } + } + return false; + } + protected DocumentParserContext( MappingLookup mappingLookup, MappingParserContext mappingParserContext, @@ -464,6 +503,10 @@ public Set getCopyToFields() { return copyToFields; } + boolean supportsObjectAutoFlattening() { + return supportsObjectAutoFlattening; + } + /** * Add a new mapper dynamically created while parsing. 
* @@ -599,6 +642,25 @@ final ObjectMapper getDynamicObjectMapper(String name) { return dynamicObjectMappers.get(name); } + ObjectMapper findObject(String fullName) { + // does the object mapper already exist? if so, use that + ObjectMapper objectMapper = mappingLookup().objectMappers().get(fullName); + if (objectMapper != null) { + return objectMapper; + } + // has the object mapper been added as a dynamic update already? + return getDynamicObjectMapper(fullName); + } + + ObjectMapper.Builder findObjectBuilder(String fullName) { + // does the object mapper already exist? if so, use that + ObjectMapper objectMapper = findObject(fullName); + if (objectMapper != null) { + return objectMapper.newBuilder(indexSettings().getIndexVersionCreated()); + } + return null; + } + /** * Add a new runtime field dynamically created while parsing. * We use the same set for both new indexed and new runtime fields, @@ -698,7 +760,7 @@ public LuceneDocument doc() { */ public final DocumentParserContext createCopyToContext(String copyToField, LuceneDocument doc) throws IOException { ContentPath path = new ContentPath(); - XContentParser parser = DotExpandingXContentParser.expandDots(new CopyToParser(copyToField, parser()), path); + XContentParser parser = DotExpandingXContentParser.expandDots(new CopyToParser(copyToField, parser()), path, this); return new Wrapper(root(), this) { @Override public ContentPath path() { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DotExpandingXContentParser.java b/server/src/main/java/org/elasticsearch/index/mapper/DotExpandingXContentParser.java index fc003e709cbca..728c7ac6f25ac 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DotExpandingXContentParser.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DotExpandingXContentParser.java @@ -18,6 +18,8 @@ import java.io.IOException; import java.util.ArrayDeque; +import java.util.ArrayList; +import java.util.Arrays; import java.util.Deque; import java.util.List; import java.util.Map; @@ -38,9 +40,13 @@ private static final class WrappingParser extends FilterXContentParser { private final ContentPath contentPath; final Deque parsers = new ArrayDeque<>(); + final DocumentParserContext context; + boolean supportsObjectAutoFlattening; - WrappingParser(XContentParser in, ContentPath contentPath) throws IOException { + WrappingParser(XContentParser in, ContentPath contentPath, DocumentParserContext context) throws IOException { this.contentPath = contentPath; + this.context = context; + this.supportsObjectAutoFlattening = (context != null && context.supportsObjectAutoFlattening()); parsers.push(in); if (in.currentToken() == Token.FIELD_NAME) { expandDots(in); @@ -107,7 +113,7 @@ private void doExpandDots(XContentParser delegate, String field, int dotCount) t if (resultSize == 0) { throw new IllegalArgumentException("field name cannot contain only dots"); } - final String[] subpaths; + String[] subpaths; if (resultSize == list.length) { for (String part : list) { // check if the field name contains only whitespace @@ -126,6 +132,9 @@ private void doExpandDots(XContentParser delegate, String field, int dotCount) t } subpaths = extractAndValidateResults(field, list, resultSize); } + if (supportsObjectAutoFlattening && subpaths.length > 1) { + subpaths = maybeFlattenPaths(Arrays.asList(subpaths), context, contentPath).toArray(String[]::new); + } pushSubParser(delegate, subpaths); } @@ -235,11 +244,13 @@ public List listOrderedMap() throws IOException { /** * Wraps an XContentParser such that it 
re-interprets dots in field names as an object structure - * @param in the parser to wrap - * @return the wrapped XContentParser + * @param in the parser to wrap + * @param contentPath the starting path to expand, can be empty + * @param context provides mapping context to check for objects supporting sub-object auto-flattening + * @return the wrapped XContentParser */ - static XContentParser expandDots(XContentParser in, ContentPath contentPath) throws IOException { - return new WrappingParser(in, contentPath); + static XContentParser expandDots(XContentParser in, ContentPath contentPath, DocumentParserContext context) throws IOException { + return new WrappingParser(in, contentPath, context); } private enum State { @@ -410,4 +421,49 @@ public Token nextToken() throws IOException { return null; } } + + static List maybeFlattenPaths(List subpaths, DocumentParserContext context, ContentPath contentPath) { + String prefixWithDots = contentPath.pathAsText(""); + ObjectMapper parent = contentPath.length() == 0 + ? context.root() + : context.findObject(prefixWithDots.substring(0, prefixWithDots.length() - 1)); + List result = new ArrayList<>(subpaths.size()); + for (int i = 0; i < subpaths.size(); i++) { + String fullPath = prefixWithDots + String.join(".", subpaths.subList(0, i)); + if (i > 0) { + parent = context.findObject(fullPath); + } + boolean match = false; + StringBuilder path = new StringBuilder(subpaths.get(i)); + if (parent == null) { + // We get here for dynamic objects, which always get parsed with subobjects and may get flattened later. + match = true; + } else if (parent.subobjects() == ObjectMapper.Subobjects.ENABLED) { + match = true; + } else if (parent.subobjects() == ObjectMapper.Subobjects.AUTO) { + // Check if there's any subobject in the remaining path. + for (int j = i; j < subpaths.size() - 1; j++) { + if (j > i) { + path.append(".").append(subpaths.get(j)); + } + Mapper mapper = parent.mappers.get(path.toString()); + if (mapper instanceof ObjectMapper objectMapper + && (ObjectMapper.isFlatteningCandidate(objectMapper.subobjects, objectMapper) + || objectMapper.checkFlattenable(null).isPresent())) { + i = j; + match = true; + break; + } + } + } + if (match) { + result.add(path.toString()); + } else { + // We only get here if parent has subobjects set to false, or set to auto with no non-flattenable object in the sub-path. + result.add(String.join(".", subpaths.subList(i, subpaths.size()))); + return result; + } + } + return result; + } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DynamicFieldsBuilder.java b/server/src/main/java/org/elasticsearch/index/mapper/DynamicFieldsBuilder.java index 4b6419b85e155..cf810e278782a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DynamicFieldsBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DynamicFieldsBuilder.java @@ -21,6 +21,7 @@ import java.io.IOException; import java.time.DateTimeException; import java.util.Map; +import java.util.Optional; /** * Encapsulates the logic for dynamically creating fields as part of document parsing. @@ -162,7 +163,9 @@ static Mapper createDynamicObjectMapper(DocumentParserContext context, String na Mapper mapper = createObjectMapperFromTemplate(context, name); return mapper != null ? mapper - : new ObjectMapper.Builder(name, context.parent().subobjects).enabled(ObjectMapper.Defaults.ENABLED) + // Dynamic objects are configured with subobject support, otherwise they can't get auto-flattened + // even if they otherwise qualify. 
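The maybeFlattenPaths logic above decides, prefix by prefix, whether a dotted field name is expanded into nested sub-objects or kept flat. Below is a self-contained toy version of that decision, assuming a plain set of known object paths in place of the mapping lookup; it is a sketch, not the ES implementation, which also consults each parent's subobjects mode (enabled/disabled/auto):

```java
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Set;

final class DotExpansionSketch {
    // Expand "a.b.c" into nested subpaths while an object mapper exists for the
    // growing prefix; as soon as none matches, keep the rest as one flattened leaf.
    static List<String> expand(String field, Set<String> objectPaths) {
        String[] tokens = field.split("\\.");
        List<String> result = new ArrayList<>();
        StringBuilder prefix = new StringBuilder();
        for (int i = 0; i < tokens.length - 1; i++) {
            if (prefix.length() > 0) {
                prefix.append('.');
            }
            prefix.append(tokens[i]);
            if (objectPaths.contains(prefix.toString()) == false) {
                // No object for this prefix: flatten the remainder into a single name.
                result.add(String.join(".", Arrays.asList(tokens).subList(i, tokens.length)));
                return result;
            }
            result.add(tokens[i]);   // an object exists, expand one more level
        }
        result.add(tokens[tokens.length - 1]);
        return result;
    }
}
```

With objectPaths = {"foo"}, expand("foo.bar.baz", ...) yields [foo, bar.baz]: 'foo' exists as an object, so the name is expanded one level, and the remainder stays flattened.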
+ : new ObjectMapper.Builder(name, Optional.empty()).enabled(ObjectMapper.Defaults.ENABLED) .build(context.createDynamicMapperBuilderContext()); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java index 46b1dbdce4c4b..529ff19bfffd7 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java @@ -1093,7 +1093,7 @@ protected BytesRef preserve(BytesRef value) { }); } - if (fieldType().ignoreAbove != ignoreAboveDefault) { + if (fieldType().ignoreAbove != Integer.MAX_VALUE) { layers.add(new CompositeSyntheticFieldLoader.StoredFieldLayer(originalName()) { @Override protected void writeValue(Object value, XContentBuilder b) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java index 2f665fd5d1e6a..31df558492b35 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java @@ -36,6 +36,7 @@ public Set getFeatures() { NodeMappingStats.SEGMENT_LEVEL_FIELDS_STATS, BooleanFieldMapper.BOOLEAN_DIMENSION, ObjectMapper.SUBOBJECTS_AUTO, + ObjectMapper.SUBOBJECTS_AUTO_FIXES, KeywordFieldMapper.KEYWORD_NORMALIZER_SYNTHETIC_SOURCE, SourceFieldMapper.SYNTHETIC_SOURCE_STORED_FIELDS_ADVANCE_FIX, Mapper.SYNTHETIC_SOURCE_KEEP_FEATURE, diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java index a7e1b3e122060..08461525526b9 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -125,7 +125,8 @@ public boolean isAutoUpdate() { "index.mapping.total_fields.ignore_dynamic_beyond_limit", false, Property.Dynamic, - Property.IndexScope + Property.IndexScope, + Property.ServerlessPublic ); public static final Setting INDEX_MAPPING_DEPTH_LIMIT_SETTING = Setting.longSetting( "index.mapping.depth.limit", diff --git a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java index f9c854749e885..b9b611d8c62f9 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java @@ -45,6 +45,7 @@ public class ObjectMapper extends Mapper { public static final String CONTENT_TYPE = "object"; static final String STORE_ARRAY_SOURCE_PARAM = "store_array_source"; static final NodeFeature SUBOBJECTS_AUTO = new NodeFeature("mapper.subobjects_auto"); + static final NodeFeature SUBOBJECTS_AUTO_FIXES = new NodeFeature("mapper.subobjects_auto_fixes"); /** * Enhances the previously boolean option for subobjects support with an intermediate mode `auto` that uses @@ -176,42 +177,84 @@ public final void addDynamic(String name, String prefix, Mapper mapper, Document // If the mapper to add has no dots, or the current object mapper has subobjects set to false, // we just add it as it is for sure a leaf mapper if (name.contains(".") == false || (subobjects.isPresent() && (subobjects.get() == Subobjects.DISABLED))) { - add(name, mapper); - } else { - // We strip off the first object path of the mapper name, load or create - 
// the relevant object mapper, and then recurse down into it, passing the remainder - // of the mapper name. So for a mapper 'foo.bar.baz', we locate 'foo' and then - // call addDynamic on it with the name 'bar.baz', and next call addDynamic on 'bar' with the name 'baz'. - int firstDotIndex = name.indexOf('.'); - String immediateChild = name.substring(0, firstDotIndex); - String immediateChildFullName = prefix == null ? immediateChild : prefix + "." + immediateChild; - Builder parentBuilder = findObjectBuilder(immediateChildFullName, context); - if (parentBuilder != null) { - parentBuilder.addDynamic(name.substring(firstDotIndex + 1), immediateChildFullName, mapper, context); - add(parentBuilder); - } else if (subobjects.isPresent() && subobjects.get() == Subobjects.AUTO) { - // No matching parent object was found, the mapper is added as a leaf - similar to subobjects false. - add(name, mapper); - } else { - // Expected to find a matching parent object but got null. - throw new IllegalStateException("Missing intermediate object " + immediateChildFullName); + if (mapper instanceof ObjectMapper objectMapper + && isFlatteningCandidate(subobjects, objectMapper) + && objectMapper.checkFlattenable(null).isEmpty()) { + // Subobjects auto and false don't allow adding subobjects dynamically. + return; } + add(name, mapper); + return; } - } + if (subobjects.isPresent() && subobjects.get() == Subobjects.AUTO) { + // Check if there's an existing field with the same name, to avoid no-op dynamic updates. + ObjectMapper objectMapper = (prefix == null) ? context.root() : context.mappingLookup().objectMappers().get(prefix); + if (objectMapper != null && objectMapper.mappers.containsKey(name)) { + return; + } + + // Check for parent objects. Due to auto-flattening, names with dots are allowed so we need to check for all possible + // object names. For instance, for mapper 'foo.bar.baz.bad', we have the following options: + // -> object 'foo' found => call addDynamic on 'bar.baz.bad' + // ---> object 'bar' found => call addDynamic on 'baz.bad' + // -----> object 'baz' found => add field 'bad' to it + // -----> no match found => add field 'baz.bad' to 'bar' + // ---> object 'bar.baz' found => add field 'bad' to it + // ---> no match found => add field 'bar.baz.bad' to 'foo' + // -> object 'foo.bar' found => call addDynamic on 'baz.bad' + // ---> object 'baz' found => add field 'bad' to it + // ---> no match found => add field 'baz.bad' to 'foo.bar' + // -> object 'foo.bar.baz' found => add field 'bad' to it + // -> no match found => add field 'foo.bar.baz.bad' to parent + String fullPathToMapper = name.substring(0, name.lastIndexOf(mapper.leafName())); + String[] fullPathTokens = fullPathToMapper.split("\\."); + StringBuilder candidateObject = new StringBuilder(); + String candidateObjectPrefix = prefix == null ? "" : prefix + "."; + for (int i = 0; i < fullPathTokens.length; i++) { + if (candidateObject.isEmpty() == false) { + candidateObject.append("."); + } + candidateObject.append(fullPathTokens[i]); + String candidateFullObject = candidateObjectPrefix.isEmpty() + ?
candidateObject.toString() + : candidateObjectPrefix + candidateObject.toString(); + ObjectMapper parent = context.findObject(candidateFullObject); + if (parent != null) { + var parentBuilder = parent.newBuilder(context.indexSettings().getIndexVersionCreated()); + parentBuilder.addDynamic(name.substring(candidateObject.length() + 1), candidateFullObject, mapper, context); + if (parentBuilder.mappersBuilders.isEmpty() == false) { + add(parentBuilder); + } + return; + } + } - private static Builder findObjectBuilder(String fullName, DocumentParserContext context) { - // does the object mapper already exist? if so, use that - ObjectMapper objectMapper = context.mappingLookup().objectMappers().get(fullName); - if (objectMapper != null) { - return objectMapper.newBuilder(context.indexSettings().getIndexVersionCreated()); + // No matching parent object was found, the mapper is added as a leaf - similar to subobjects false. + // This only applies to field mappers, as subobjects get auto-flattened. + if (mapper instanceof FieldMapper fieldMapper) { + FieldMapper.Builder fieldBuilder = fieldMapper.getMergeBuilder(); + fieldBuilder.setLeafName(name); // Update to reflect the current, possibly flattened name. + add(fieldBuilder); + } + return; } - // has the object mapper been added as a dynamic update already? - objectMapper = context.getDynamicObjectMapper(fullName); - if (objectMapper != null) { - return objectMapper.newBuilder(context.indexSettings().getIndexVersionCreated()); + + // We strip off the first object path of the mapper name, load or create + // the relevant object mapper, and then recurse down into it, passing the remainder + // of the mapper name. So for a mapper 'foo.bar.baz', we locate 'foo' and then + // call addDynamic on it with the name 'bar.baz', and next call addDynamic on 'bar' with the name 'baz'. + int firstDotIndex = name.indexOf('.'); + String immediateChild = name.substring(0, firstDotIndex); + String immediateChildFullName = prefix == null ? immediateChild : prefix + "." + immediateChild; + Builder parentBuilder = context.findObjectBuilder(immediateChildFullName); + if (parentBuilder != null) { + parentBuilder.addDynamic(name.substring(firstDotIndex + 1), immediateChildFullName, mapper, context); + add(parentBuilder); + } else { + // Expected to find a matching parent object but got null. + throw new IllegalStateException("Missing intermediate object " + immediateChildFullName); } - // no object mapper found - return null; + } protected final Map buildMappers(MapperBuilderContext mapperBuilderContext) { @@ -227,9 +270,10 @@ protected final Map buildMappers(MapperBuilderContext mapperBuil // mix of object notation and dot notation. mapper = existing.merge(mapper, MapperMergeContext.from(mapperBuilderContext, Long.MAX_VALUE)); } - if (subobjects.isPresent() && subobjects.get() == Subobjects.DISABLED && mapper instanceof ObjectMapper objectMapper) { - // We're parsing a mapping that has set `subobjects: false` but has defined sub-objects - objectMapper.asFlattenedFieldMappers(mapperBuilderContext).forEach(m -> mappers.put(m.leafName(), m)); + if (mapper instanceof ObjectMapper objectMapper && isFlatteningCandidate(subobjects, objectMapper)) { + // We're parsing a mapping that has defined sub-objects, may need to flatten them. 
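The asFlattenedFieldMappers path invoked just below rewrites an object's subtree into leaf mappers whose names are the dot-joined paths. A toy model of that rewriting, with plain maps standing in for mapper objects (a sketch under that simplification, not the ES code):

```java
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

final class FlattenSketch {
    // Rewrite a tree of (name -> child map | leaf value) into dotted leaf names,
    // mirroring what auto-flattening does to a subobjects:false/auto mapping.
    static List<String> flatten(Map<String, Object> objectTree) {
        List<String> leaves = new ArrayList<>();
        collect("", objectTree, leaves);
        return leaves;
    }

    private static void collect(String prefix, Map<String, Object> node, List<String> leaves) {
        for (Map.Entry<String, Object> entry : node.entrySet()) {
            String path = prefix.isEmpty() ? entry.getKey() : prefix + "." + entry.getKey();
            if (entry.getValue() instanceof Map<?, ?> child) {
                @SuppressWarnings("unchecked")
                Map<String, Object> childMap = (Map<String, Object>) child;
                collect(path, childMap, leaves);   // recurse into sub-objects
            } else {
                leaves.add(path);                  // leaf keeps its full dotted name
            }
        }
    }
}
```

For instance, flatten(Map.of("foo", Map.of("bar", 1, "baz", 2))) produces ["foo.bar", "foo.baz"], i.e. the sub-object disappears and its fields keep their full paths as leaf names.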
+ objectMapper.asFlattenedFieldMappers(mapperBuilderContext, throwOnFlattenableError(subobjects)) + .forEach(m -> mappers.put(m.leafName(), m)); } else { mappers.put(mapper.leafName(), mapper); } @@ -624,12 +668,11 @@ private static Map buildMergedMappers( Optional subobjects ) { Map mergedMappers = new HashMap<>(); + var context = objectMergeContext.getMapperBuilderContext(); for (Mapper childOfExistingMapper : existing.mappers.values()) { - if (subobjects.isPresent() - && subobjects.get() == Subobjects.DISABLED - && childOfExistingMapper instanceof ObjectMapper objectMapper) { - // An existing mapping with sub-objects is merged with a mapping that has set `subobjects: false` - objectMapper.asFlattenedFieldMappers(objectMergeContext.getMapperBuilderContext()) + if (childOfExistingMapper instanceof ObjectMapper objectMapper && isFlatteningCandidate(subobjects, objectMapper)) { + // An existing mapping with sub-objects is merged with a mapping that has `subobjects` set to false or auto. + objectMapper.asFlattenedFieldMappers(context, throwOnFlattenableError(subobjects)) .forEach(m -> mergedMappers.put(m.leafName(), m)); } else { putMergedMapper(mergedMappers, childOfExistingMapper); @@ -638,11 +681,9 @@ private static Map buildMergedMappers( for (Mapper mergeWithMapper : mergeWithObject) { Mapper mergeIntoMapper = mergedMappers.get(mergeWithMapper.leafName()); if (mergeIntoMapper == null) { - if (subobjects.isPresent() - && subobjects.get() == Subobjects.DISABLED - && mergeWithMapper instanceof ObjectMapper objectMapper) { - // An existing mapping that has set `subobjects: false` is merged with a mapping with sub-objects - objectMapper.asFlattenedFieldMappers(objectMergeContext.getMapperBuilderContext()) + if (mergeWithMapper instanceof ObjectMapper objectMapper && isFlatteningCandidate(subobjects, objectMapper)) { + // An existing mapping with `subobjects` set to false or auto is merged with a mapping with sub-objects + objectMapper.asFlattenedFieldMappers(context, throwOnFlattenableError(subobjects)) .stream() .filter(m -> objectMergeContext.decrementFieldBudgetIfPossible(m.getTotalFieldsCount())) .forEach(m -> putMergedMapper(mergedMappers, m)); @@ -699,57 +740,83 @@ private static ObjectMapper truncateObjectMapper(MapperMergeContext context, Obj * * @throws IllegalArgumentException if the mapper cannot be flattened */ - List asFlattenedFieldMappers(MapperBuilderContext context) { - List flattenedMappers = new ArrayList<>(); + List asFlattenedFieldMappers(MapperBuilderContext context, boolean throwOnFlattenableError) { + List flattenedMappers = new ArrayList<>(); ContentPath path = new ContentPath(); - asFlattenedFieldMappers(context, flattenedMappers, path); + asFlattenedFieldMappers(context, flattenedMappers, path, throwOnFlattenableError); return flattenedMappers; } - private void asFlattenedFieldMappers(MapperBuilderContext context, List flattenedMappers, ContentPath path) { - ensureFlattenable(context, path); + static boolean isFlatteningCandidate(Optional subobjects, ObjectMapper mapper) { + return subobjects.isPresent() && subobjects.get() != Subobjects.ENABLED && mapper instanceof NestedObjectMapper == false; + } + + private static boolean throwOnFlattenableError(Optional subobjects) { + return subobjects.isPresent() && subobjects.get() == Subobjects.DISABLED; + } + + private void asFlattenedFieldMappers( + MapperBuilderContext context, + List flattenedMappers, + ContentPath path, + boolean throwOnFlattenableError + ) { + var error = checkFlattenable(context); + if 
(error.isPresent()) { + if (throwOnFlattenableError) { + throw new IllegalArgumentException( + "Object mapper [" + + path.pathAsText(leafName()) + + "] was found in a context where subobjects is set to false. " + + "Auto-flattening [" + + path.pathAsText(leafName()) + + "] failed because " + + error.get() + ); + } + // The object can't be auto-flattened under the parent object, so it gets added at the current level. + // [subobjects=auto] applies auto-flattening to names, so the leaf name may need to change. + // Since mapper objects are immutable, we create a clone of the current one with the updated leaf name. + flattenedMappers.add( + path.pathAsText("").isEmpty() + ? this + : new ObjectMapper(path.pathAsText(leafName()), fullPath, enabled, subobjects, storeArraySource, dynamic, mappers) + ); + return; + } path.add(leafName()); for (Mapper mapper : mappers.values()) { if (mapper instanceof FieldMapper fieldMapper) { FieldMapper.Builder fieldBuilder = fieldMapper.getMergeBuilder(); fieldBuilder.setLeafName(path.pathAsText(mapper.leafName())); flattenedMappers.add(fieldBuilder.build(context)); - } else if (mapper instanceof ObjectMapper objectMapper) { - objectMapper.asFlattenedFieldMappers(context, flattenedMappers, path); + } else if (mapper instanceof ObjectMapper objectMapper && mapper instanceof NestedObjectMapper == false) { + objectMapper.asFlattenedFieldMappers(context, flattenedMappers, path, throwOnFlattenableError); } } path.remove(); } - private void ensureFlattenable(MapperBuilderContext context, ContentPath path) { - if (dynamic != null && context.getDynamic() != dynamic) { - throwAutoFlatteningException( - path, + Optional checkFlattenable(MapperBuilderContext context) { + if (dynamic != null && (context == null || context.getDynamic() != dynamic)) { + return Optional.of( "the value of [dynamic] (" + dynamic + ") is not compatible with the value from its parent context (" - + context.getDynamic() + + (context != null ? context.getDynamic() : "") + ")" ); } + if (storeArraySource()) { + return Optional.of("the value of [store_array_source] is [true]"); + } if (isEnabled() == false) { - throwAutoFlatteningException(path, "the value of [enabled] is [false]"); + return Optional.of("the value of [enabled] is [false]"); } - if (subobjects.isPresent() && subobjects.get() == Subobjects.ENABLED) { - throwAutoFlatteningException(path, "the value of [subobjects] is [true]"); + if (subobjects.isPresent() && subobjects.get() != Subobjects.DISABLED) { + return Optional.of("the value of [subobjects] is [" + subobjects().printedValue + "]"); } - } - - private void throwAutoFlatteningException(ContentPath path, String reason) { - throw new IllegalArgumentException( - "Object mapper [" - + path.pathAsText(leafName()) - + "] was found in a context where subobjects is set to false. 
" - + "Auto-flattening [" - + path.pathAsText(leafName()) - + "] failed because " - + reason - ); + return Optional.empty(); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java b/server/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java index 4c861c2320ea5..806f28d72647a 100644 --- a/server/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java @@ -50,9 +50,9 @@ public final class InnerHitBuilder implements Writeable, ToXContentObject { public static final ParseField COLLAPSE_FIELD = new ParseField("collapse"); public static final ParseField FIELD_FIELD = new ParseField("field"); + public static final int DEFAULT_FROM = 0; + public static final int DEFAULT_SIZE = 3; private static final boolean DEFAULT_IGNORE_UNAMPPED = false; - private static final int DEFAULT_FROM = 0; - private static final int DEFAULT_SIZE = 3; private static final boolean DEFAULT_VERSION = false; private static final boolean DEFAULT_SEQ_NO_AND_PRIMARY_TERM = false; private static final boolean DEFAULT_EXPLAIN = false; diff --git a/server/src/main/java/org/elasticsearch/index/query/SearchIndexNameMatcher.java b/server/src/main/java/org/elasticsearch/index/query/SearchIndexNameMatcher.java index 9e34093776fb2..6799895d8e278 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SearchIndexNameMatcher.java +++ b/server/src/main/java/org/elasticsearch/index/query/SearchIndexNameMatcher.java @@ -53,14 +53,12 @@ public SearchIndexNameMatcher( * the separator ':', and must match on both the cluster alias and index name. */ public boolean test(String pattern) { - int separatorIndex = pattern.indexOf(RemoteClusterAware.REMOTE_CLUSTER_INDEX_SEPARATOR); - if (separatorIndex < 0) { + String[] splitIndex = RemoteClusterAware.splitIndexName(pattern); + + if (splitIndex[0] == null) { return clusterAlias == null && matchesIndex(pattern); } else { - String clusterPattern = pattern.substring(0, separatorIndex); - String indexPattern = pattern.substring(separatorIndex + 1); - - return Regex.simpleMatch(clusterPattern, clusterAlias) && matchesIndex(indexPattern); + return Regex.simpleMatch(splitIndex[0], clusterAlias) && matchesIndex(splitIndex[1]); } } diff --git a/server/src/main/java/org/elasticsearch/index/snapshots/blobstore/SlicedInputStream.java b/server/src/main/java/org/elasticsearch/index/snapshots/blobstore/SlicedInputStream.java index 92e71c08cf056..1edd69a6443a7 100644 --- a/server/src/main/java/org/elasticsearch/index/snapshots/blobstore/SlicedInputStream.java +++ b/server/src/main/java/org/elasticsearch/index/snapshots/blobstore/SlicedInputStream.java @@ -24,12 +24,12 @@ public abstract class SlicedInputStream extends InputStream { private int nextSlice = 0; private InputStream currentStream; - private int currentSliceOffset = 0; + private long currentSliceOffset = 0; private final int numSlices; private boolean closed = false; private boolean initialized = false; private int markedSlice = -1; - private int markedSliceOffset = -1; + private long markedSliceOffset = -1; /** * Creates a new SlicedInputStream @@ -98,6 +98,30 @@ public final int read(byte[] buffer, int offset, int length) throws IOException return read; } + @Override + public long skip(long n) throws IOException { + long remaining = n; + while (remaining > 0) { + final InputStream stream = currentStream(); + if (stream == null) { + break; + } + long skipped = stream.skip(remaining); + currentSliceOffset += 
skipped; + if (skipped < remaining) { + // read one more byte to see if we reached EOF in order to proceed to the next stream. + if (stream.read() < 0) { + nextStream(); + } else { + currentSliceOffset++; + skipped++; + } + } + remaining -= skipped; + } + return n - remaining; + } + @Override public void close() throws IOException { closed = true; @@ -129,7 +153,7 @@ public void mark(int readLimit) { // According to JDK documentation, marking a closed InputStream should have no effect. if (markSupported() && isClosed() == false && numSlices > 0) { if (initialized) { - markedSlice = nextSlice - 1; + markedSlice = (currentStream == null) ? numSlices : nextSlice - 1; markedSliceOffset = currentSliceOffset; } else { markedSlice = 0; @@ -148,12 +172,16 @@ public void reset() throws IOException { throw new IOException("Mark has not been set"); } - // We do not call the SlicedInputStream's skipNBytes but call skipNBytes directly on the returned stream, to ensure that - // the skip is performed on the marked slice and no other slices are involved. This may help uncover any bugs. nextSlice = markedSlice; - final InputStream stream = nextStream(); - if (stream != null) { - stream.skipNBytes(markedSliceOffset); + initialized = true; + IOUtils.close(currentStream); + if (nextSlice < numSlices) { + currentStream = openSlice(nextSlice++); + // We do not call the SlicedInputStream's skipNBytes but call skipNBytes directly on the returned stream, to ensure that + // the skip is performed on the marked slice and no other slices are involved. This may help uncover any bugs. + currentStream.skipNBytes(markedSliceOffset); + } else { + currentStream = null; } currentSliceOffset = markedSliceOffset; } diff --git a/server/src/main/java/org/elasticsearch/inference/InferenceService.java b/server/src/main/java/org/elasticsearch/inference/InferenceService.java index f677f75dfb5ae..854c58b4f57ad 100644 --- a/server/src/main/java/org/elasticsearch/inference/InferenceService.java +++ b/server/src/main/java/org/elasticsearch/inference/InferenceService.java @@ -85,6 +85,7 @@ void parseRequestConfig( * @param model The model * @param query Inference query, mainly for re-ranking * @param input Inference input + * @param stream Stream inference results * @param taskSettings Settings in the request to override the model's defaults * @param inputType For search, ingest etc * @param timeout The timeout for the request @@ -94,6 +95,7 @@ void infer( Model model, @Nullable String query, List input, + boolean stream, Map taskSettings, InputType inputType, TimeValue timeout, diff --git a/server/src/main/java/org/elasticsearch/persistent/CompletionPersistentTaskAction.java b/server/src/main/java/org/elasticsearch/persistent/CompletionPersistentTaskAction.java index bfb2d3cc9aac8..7fd3459e010c7 100644 --- a/server/src/main/java/org/elasticsearch/persistent/CompletionPersistentTaskAction.java +++ b/server/src/main/java/org/elasticsearch/persistent/CompletionPersistentTaskAction.java @@ -35,28 +35,18 @@ * ActionType that is used by executor node to indicate that the persistent action finished or failed on the node and needs to be * removed from the cluster state in case of successful completion or restarted on some other node in case of failure. 
*/ -public class CompletionPersistentTaskAction extends ActionType { +public class CompletionPersistentTaskAction { - public static final CompletionPersistentTaskAction INSTANCE = new CompletionPersistentTaskAction(); - public static final String NAME = "cluster:admin/persistent/completion"; + public static final ActionType INSTANCE = new ActionType<>("cluster:admin/persistent/completion"); - private CompletionPersistentTaskAction() { - super(NAME); - } + private CompletionPersistentTaskAction() {/* no instances */} public static class Request extends MasterNodeRequest { - private String taskId; - - private Exception exception; - - private long allocationId = -1; - - private String localAbortReason; - - public Request() { - super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); - } + private final String taskId; + private final Exception exception; + private final long allocationId; + private final String localAbortReason; public Request(StreamInput in) throws IOException { super(in); @@ -129,7 +119,7 @@ public TransportAction( IndexNameExpressionResolver indexNameExpressionResolver ) { super( - CompletionPersistentTaskAction.NAME, + INSTANCE.name(), transportService, clusterService, threadPool, diff --git a/server/src/main/java/org/elasticsearch/persistent/RemovePersistentTaskAction.java b/server/src/main/java/org/elasticsearch/persistent/RemovePersistentTaskAction.java index 5453f07c0ca00..86f9f981b2c7e 100644 --- a/server/src/main/java/org/elasticsearch/persistent/RemovePersistentTaskAction.java +++ b/server/src/main/java/org/elasticsearch/persistent/RemovePersistentTaskAction.java @@ -29,22 +29,15 @@ import java.io.IOException; import java.util.Objects; -public class RemovePersistentTaskAction extends ActionType { +public class RemovePersistentTaskAction { - public static final RemovePersistentTaskAction INSTANCE = new RemovePersistentTaskAction(); - public static final String NAME = "cluster:admin/persistent/remove"; + public static final ActionType INSTANCE = new ActionType<>("cluster:admin/persistent/remove"); - private RemovePersistentTaskAction() { - super(NAME); - } + private RemovePersistentTaskAction() {/* no instances */} public static class Request extends MasterNodeRequest { - private String taskId; - - public Request() { - super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); - } + private final String taskId; public Request(StreamInput in) throws IOException { super(in); @@ -56,10 +49,6 @@ public Request(String taskId) { this.taskId = taskId; } - public void setTaskId(String taskId) { - this.taskId = taskId; - } - @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); @@ -99,7 +88,7 @@ public TransportAction( IndexNameExpressionResolver indexNameExpressionResolver ) { super( - RemovePersistentTaskAction.NAME, + INSTANCE.name(), transportService, clusterService, threadPool, diff --git a/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java b/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java index 155bee21a310e..fd3d87c21ab43 100644 --- a/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java +++ b/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java @@ -21,7 +21,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.core.Nullable; import org.elasticsearch.injection.guice.Inject; import 
org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ -35,26 +34,16 @@ /** * This action can be used to add the record for the persistent action to the cluster state. */ -public class StartPersistentTaskAction extends ActionType { +public class StartPersistentTaskAction { - public static final StartPersistentTaskAction INSTANCE = new StartPersistentTaskAction(); - public static final String NAME = "cluster:admin/persistent/start"; + public static final ActionType INSTANCE = new ActionType<>("cluster:admin/persistent/start"); - private StartPersistentTaskAction() { - super(NAME); - } + private StartPersistentTaskAction() {/* no instances */} public static class Request extends MasterNodeRequest { - - private String taskId; - - private String taskName; - - private PersistentTaskParams params; - - public Request() { - super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); - } + private final String taskId; + private final String taskName; + private final PersistentTaskParams params; public Request(StreamInput in) throws IOException { super(in); @@ -117,27 +106,13 @@ public String getTaskName() { return taskName; } - public void setTaskName(String taskName) { - this.taskName = taskName; - } - public String getTaskId() { return taskId; } - public void setTaskId(String taskId) { - this.taskId = taskId; - } - public PersistentTaskParams getParams() { return params; } - - @Nullable - public void setParams(PersistentTaskParams params) { - this.params = params; - } - } public static class TransportAction extends TransportMasterNodeAction { @@ -156,7 +131,7 @@ public TransportAction( IndexNameExpressionResolver indexNameExpressionResolver ) { super( - StartPersistentTaskAction.NAME, + INSTANCE.name(), transportService, clusterService, threadPool, diff --git a/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java b/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java index a8a076c642d8c..268522441f520 100644 --- a/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java +++ b/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java @@ -31,24 +31,16 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; -public class UpdatePersistentTaskStatusAction extends ActionType { +public class UpdatePersistentTaskStatusAction { - public static final UpdatePersistentTaskStatusAction INSTANCE = new UpdatePersistentTaskStatusAction(); - public static final String NAME = "cluster:admin/persistent/update_status"; + public static final ActionType INSTANCE = new ActionType<>("cluster:admin/persistent/update_status"); - private UpdatePersistentTaskStatusAction() { - super(NAME); - } + private UpdatePersistentTaskStatusAction() {/* no instances */} public static class Request extends MasterNodeRequest { - - private String taskId; - private long allocationId = -1L; - private PersistentTaskState state; - - public Request() { - super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); - } + private final String taskId; + private final long allocationId; + private final PersistentTaskState state; public Request(StreamInput in) throws IOException { super(in); @@ -64,26 +56,14 @@ public Request(String taskId, long allocationId, PersistentTaskState state) { this.state = state; } - public void setTaskId(String taskId) { - this.taskId = taskId; - } - public String getTaskId() { return taskId; } - public void setAllocationId(long allocationId) { - 
this.allocationId = allocationId; - } - public long getAllocationId() { return allocationId; } - public void setState(PersistentTaskState state) { - this.state = state; - } - public PersistentTaskState getState() { return state; } @@ -138,7 +118,7 @@ public TransportAction( IndexNameExpressionResolver indexNameExpressionResolver ) { super( - UpdatePersistentTaskStatusAction.NAME, + INSTANCE.name(), transportService, clusterService, threadPool, diff --git a/server/src/main/java/org/elasticsearch/rest/RestHandler.java b/server/src/main/java/org/elasticsearch/rest/RestHandler.java index b7dbe09db4a39..ede295fee9f4d 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestHandler.java +++ b/server/src/main/java/org/elasticsearch/rest/RestHandler.java @@ -195,8 +195,9 @@ private RouteBuilder(Method method, String path) { } /** - * Marks that the route being built has been deprecated (for some reason -- the deprecationMessage), and notes the major - * version in which that deprecation occurred. + * Marks that the route being built has been deprecated (for some reason -- the deprecationMessage) for removal. Notes the last + * major version in which the path is fully supported without compatibility headers. If this path is being replaced by another + * then use {@link #replaces(Method, String, RestApiVersion)} instead. *
<p>
 * For example:
 * <pre> {@code
@@ -205,55 +206,57 @@ private RouteBuilder(Method method, String path) {
              *  .build()}</pre>
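Under the clarified contract above, a route deprecated with lastFullySupportedVersion = V_7 stays reachable in v8 only with a compatible-with=7 header and disappears entirely in v9. A hypothetical handler using it (the path, message, and class name here are made up; the builder API is the one from this diff):

```java
import java.util.List;

import org.elasticsearch.core.RestApiVersion;
import org.elasticsearch.rest.BaseRestHandler;

import static org.elasticsearch.rest.RestRequest.Method.GET;

// Sketch of a handler whose route was last fully supported in V_7: v8 clients
// must send an Accept/Content-Type header with compatible-with=7 to reach it,
// and v9 removes the route outright.
public abstract class LegacyThingAction extends BaseRestHandler {
    @Override
    public List<Route> routes() {
        return List.of(
            Route.builder(GET, "/_legacy/thing")
                .deprecated("Use [GET /_new/thing] instead.", RestApiVersion.V_7)
                .build()
        );
    }
}
```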
* * @param deprecationMessage the user-visible explanation of this deprecation - * @param deprecatedInVersion the major version in which the deprecation occurred + * @param lastFullySupportedVersion the last {@link RestApiVersion} (i.e. 7) for which this route is fully supported. + * The next major version (i.e. 8) will require compatibility header(s). (;compatible-with=7) + * The next major version (i.e. 9) will have no support whatsoever for this route. * @return a reference to this object. */ - public RouteBuilder deprecated(String deprecationMessage, RestApiVersion deprecatedInVersion) { + public RouteBuilder deprecated(String deprecationMessage, RestApiVersion lastFullySupportedVersion) { assert this.replacedRoute == null; - this.restApiVersion = Objects.requireNonNull(deprecatedInVersion); + this.restApiVersion = Objects.requireNonNull(lastFullySupportedVersion); this.deprecationMessage = Objects.requireNonNull(deprecationMessage); return this; } /** - * Marks that the route being built has been deprecated (for some reason -- the deprecationMessage), and notes the major - * version in which that deprecation occurred. + * Marks that the route being built replaces another route, and notes the last major version in which the path is fully + * supported without compatibility headers. *
<p>
 * For example:
 * <pre> {@code
-             * Route.builder(GET, "_upgrade")
-             *  .deprecated("The _upgrade API is no longer useful and will be removed.", RestApiVersion.V_7)
-             *  .build()}</pre>
+ * Route.builder(GET, "/_security/user/") + * .replaces(GET, "/_xpack/security/user/", RestApiVersion.V_7).build()} * - * @param deprecationMessage the user-visible explanation of this deprecation - * @param deprecationLevel the level at which to log the deprecation - * @param deprecatedInVersion the major version in which the deprecation occurred + * @param method the method being replaced + * @param path the path being replaced + * @param lastFullySupportedVersion the last {@link RestApiVersion} (i.e. 7) for which this route is fully supported. + * The next major version (i.e. 8) will require compatibility header(s). (;compatible-with=7) + * The next major version (i.e. 9) will have no support whatsoever for this route. * @return a reference to this object. */ - public RouteBuilder deprecated(String deprecationMessage, Level deprecationLevel, RestApiVersion deprecatedInVersion) { - assert this.replacedRoute == null; - this.restApiVersion = Objects.requireNonNull(deprecatedInVersion); - this.deprecationMessage = Objects.requireNonNull(deprecationMessage); - this.deprecationLevel = deprecationLevel; + public RouteBuilder replaces(Method method, String path, RestApiVersion lastFullySupportedVersion) { + assert this.deprecationMessage == null; + this.replacedRoute = new Route(method, path, lastFullySupportedVersion, null, null, null); return this; } /** - * Marks that the route being built replaces another route, and notes the major version in which that replacement occurred. + * Marks that the route being built has been deprecated (for some reason -- the deprecationMessage), but will not be removed. *
<p>
 * For example:
 * <pre> {@code
-             * Route.builder(GET, "/_security/user/")
-             *   .replaces(GET, "/_xpack/security/user/", RestApiVersion.V_7).build()}</pre>
+ * Route.builder(GET, "_upgrade") + * .deprecated("The _upgrade API is no longer useful but will not be removed.") + * .build()} * - * @param method the method being replaced - * @param path the path being replaced - * @param replacedInVersion the major version in which the replacement occurred + * @param deprecationMessage the user-visible explanation of this deprecation * @return a reference to this object. */ - public RouteBuilder replaces(Method method, String path, RestApiVersion replacedInVersion) { - assert this.deprecationMessage == null; - this.replacedRoute = new Route(method, path, replacedInVersion, null, null, null); + public RouteBuilder deprecateAndKeep(String deprecationMessage) { + assert this.replacedRoute == null; + this.restApiVersion = RestApiVersion.current(); + this.deprecationMessage = Objects.requireNonNull(deprecationMessage); + this.deprecationLevel = Level.WARN; return this; } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesCapabilitiesAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesCapabilitiesAction.java index ad405656631b0..5c8e5928678c3 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesCapabilitiesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesCapabilitiesAction.java @@ -54,9 +54,12 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli ? new NodesCapabilitiesRequest(client.getLocalNodeId()) : new NodesCapabilitiesRequest(); + // Handle the 'path' parameter, use "/" as default if not provided + String path = URLDecoder.decode(request.param("path", "/"), StandardCharsets.UTF_8); + NodesCapabilitiesRequest r = requestNodes.timeout(getTimeout(request)) .method(RestRequest.Method.valueOf(request.param("method", "GET"))) - .path(URLDecoder.decode(request.param("path"), StandardCharsets.UTF_8)) + .path(path) .parameters(request.paramAsStringArray("parameters", Strings.EMPTY_ARRAY)) .capabilities(request.paramAsStringArray("capabilities", Strings.EMPTY_ARRAY)) .restApiVersion(request.getRestApiVersion()); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateAction.java index 362713a8f48cb..f70d9351e69c9 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateAction.java @@ -9,7 +9,6 @@ package org.elasticsearch.rest.action.admin.indices; -import org.apache.logging.log4j.Level; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; @@ -41,8 +40,8 @@ public class RestPutIndexTemplateAction extends BaseRestHandler { @Override public List routes() { return List.of( - Route.builder(POST, "/_template/{name}").deprecated(DEPRECATION_WARNING, Level.WARN, DEPRECATION_VERSION).build(), - Route.builder(PUT, "/_template/{name}").deprecated(DEPRECATION_WARNING, Level.WARN, DEPRECATION_VERSION).build() + Route.builder(POST, "/_template/{name}").deprecateAndKeep(DEPRECATION_WARNING).build(), + Route.builder(PUT, "/_template/{name}").deprecateAndKeep(DEPRECATION_WARNING).build() ); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestKnnSearchAction.java 
b/server/src/main/java/org/elasticsearch/rest/action/search/RestKnnSearchAction.java index dd868b8321f1d..9b9be199eedae 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestKnnSearchAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestKnnSearchAction.java @@ -11,7 +11,7 @@ import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestCancellableNodeClient; @@ -34,11 +34,18 @@ public class RestKnnSearchAction extends BaseRestHandler { public RestKnnSearchAction() {} + @UpdateForV9 // these routes were ".deprecated" in RestApiVersion.V_8 which will require use of REST API compatibility headers to access + // this API in v9. It is unclear if this was intentional for v9, and the code has been updated to ".deprecateAndKeep" which will + // continue to emit deprecations warnings but will not require any special headers to access the API in v9. + // Please review and update the code and tests as needed. The original code remains commented out below for reference. @Override public List routes() { + return List.of( - Route.builder(GET, "{index}/_knn_search").deprecated(DEPRECATION_MESSAGE, RestApiVersion.V_8).build(), - Route.builder(POST, "{index}/_knn_search").deprecated(DEPRECATION_MESSAGE, RestApiVersion.V_8).build() + // Route.builder(GET, "{index}/_knn_search").deprecated(DEPRECATION_MESSAGE, RestApiVersion.V_8).build(), + // Route.builder(POST, "{index}/_knn_search").deprecated(DEPRECATION_MESSAGE, RestApiVersion.V_8).build() + Route.builder(GET, "{index}/_knn_search").deprecateAndKeep(DEPRECATION_MESSAGE).build(), + Route.builder(POST, "{index}/_knn_search").deprecateAndKeep(DEPRECATION_MESSAGE).build() ); } diff --git a/server/src/main/java/org/elasticsearch/search/SearchSortValues.java b/server/src/main/java/org/elasticsearch/search/SearchSortValues.java index eb9fa935d2374..d134d0664e3c7 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchSortValues.java +++ b/server/src/main/java/org/elasticsearch/search/SearchSortValues.java @@ -13,11 +13,9 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.lucene.Lucene; -import org.elasticsearch.common.xcontent.XContentParserUtils; import org.elasticsearch.search.SearchHit.Fields; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.Arrays; @@ -84,11 +82,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - public static SearchSortValues fromXContent(XContentParser parser) throws IOException { - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser); - return new SearchSortValues(parser.list().toArray()); - } - /** * Returns the formatted version of the values that sorting was performed against */ diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java b/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java index a5a7cdeaaae5c..9ed62add775c0 100644 --- 
a/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java @@ -8,8 +8,6 @@ */ package org.elasticsearch.search.aggregations; -import org.apache.lucene.util.SetOnce; -import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.DelayableWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -22,21 +20,18 @@ import org.elasticsearch.search.sort.SortValue; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.AbstractList; import java.util.ArrayList; import java.util.Iterator; import java.util.List; -import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.stream.Collectors; import static java.util.Collections.unmodifiableMap; -import static org.elasticsearch.common.xcontent.XContentParserUtils.parseTypedKeysObject; /** * Represents a set of {@link InternalAggregation}s @@ -52,7 +47,7 @@ public final class InternalAggregations implements Iterable /** * Constructs a new aggregation. */ - private InternalAggregations(List aggregations) { + public InternalAggregations(List aggregations) { this.aggregations = aggregations; if (aggregations.isEmpty()) { aggregationsAsMap = Map.of(); @@ -126,27 +121,6 @@ public XContentBuilder toXContentInternal(XContentBuilder builder, Params params return builder; } - public static InternalAggregations fromXContent(XContentParser parser) throws IOException { - final List aggregations = new ArrayList<>(); - XContentParser.Token token; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.START_OBJECT) { - SetOnce typedAgg = new SetOnce<>(); - String currentField = parser.currentName(); - parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, InternalAggregation.class, typedAgg::set); - if (typedAgg.get() != null) { - aggregations.add(typedAgg.get()); - } else { - throw new ParsingException( - parser.getTokenLocation(), - String.format(Locale.ROOT, "Could not parse aggregation keyed as [%s]", currentField) - ); - } - } - } - return new InternalAggregations(aggregations); - } - public static InternalAggregations from(List aggregations) { if (aggregations.isEmpty()) { return EMPTY; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/EmptyTDigestState.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/EmptyTDigestState.java index 6ae9c655df3e8..56ac38a70cf07 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/EmptyTDigestState.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/EmptyTDigestState.java @@ -9,12 +9,10 @@ package org.elasticsearch.search.aggregations.metrics; -import org.elasticsearch.tdigest.arrays.WrapperTDigestArrays; - public final class EmptyTDigestState extends TDigestState { public EmptyTDigestState() { // Use the sorting implementation to minimize memory allocation. 
- super(WrapperTDigestArrays.INSTANCE, Type.SORTING, 1.0D); + super(MemoryTrackingTDigestArrays.INSTANCE, Type.SORTING, 1.0D); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MemoryTrackingTDigestArrays.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MemoryTrackingTDigestArrays.java new file mode 100644 index 0000000000000..e99bfbfe534c8 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MemoryTrackingTDigestArrays.java @@ -0,0 +1,401 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.search.aggregations.metrics; + +import org.apache.lucene.util.Accountable; +import org.apache.lucene.util.ArrayUtil; +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.breaker.NoopCircuitBreaker; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.tdigest.arrays.TDigestArrays; +import org.elasticsearch.tdigest.arrays.TDigestByteArray; +import org.elasticsearch.tdigest.arrays.TDigestDoubleArray; +import org.elasticsearch.tdigest.arrays.TDigestIntArray; +import org.elasticsearch.tdigest.arrays.TDigestLongArray; + +import java.util.Arrays; +import java.util.concurrent.atomic.AtomicBoolean; + +/** + * TDigestArrays with raw arrays and circuit breaking. + */ +public class MemoryTrackingTDigestArrays implements TDigestArrays { + + /** + * Default no-op CB instance of the wrapper. + * + * @deprecated This instance shouldn't be used, and will be removed after all usages are replaced. 
+ */ + @Deprecated + public static final MemoryTrackingTDigestArrays INSTANCE = new MemoryTrackingTDigestArrays( + new NoopCircuitBreaker("default-wrapper-tdigest-arrays") + ); + + private final CircuitBreaker breaker; + + public MemoryTrackingTDigestArrays(CircuitBreaker breaker) { + this.breaker = breaker; + } + + @Override + public MemoryTrackingTDigestDoubleArray newDoubleArray(int initialSize) { + breaker.addEstimateBytesAndMaybeBreak( + MemoryTrackingTDigestDoubleArray.estimatedRamBytesUsed(initialSize), + "tdigest-new-double-array" + ); + return new MemoryTrackingTDigestDoubleArray(breaker, initialSize); + } + + @Override + public MemoryTrackingTDigestIntArray newIntArray(int initialSize) { + breaker.addEstimateBytesAndMaybeBreak(MemoryTrackingTDigestIntArray.estimatedRamBytesUsed(initialSize), "tdigest-new-int-array"); + return new MemoryTrackingTDigestIntArray(breaker, initialSize); + } + + @Override + public TDigestLongArray newLongArray(int initialSize) { + breaker.addEstimateBytesAndMaybeBreak(MemoryTrackingTDigestLongArray.estimatedRamBytesUsed(initialSize), "tdigest-new-long-array"); + return new MemoryTrackingTDigestLongArray(breaker, initialSize); + } + + @Override + public TDigestByteArray newByteArray(int initialSize) { + breaker.addEstimateBytesAndMaybeBreak(MemoryTrackingTDigestByteArray.estimatedRamBytesUsed(initialSize), "tdigest-new-byte-array"); + return new MemoryTrackingTDigestByteArray(breaker, initialSize); + } + + public MemoryTrackingTDigestDoubleArray newDoubleArray(double[] array) { + breaker.addEstimateBytesAndMaybeBreak( + MemoryTrackingTDigestDoubleArray.estimatedRamBytesUsed(array.length), + "tdigest-new-double-array" + ); + return new MemoryTrackingTDigestDoubleArray(breaker, array); + } + + public MemoryTrackingTDigestIntArray newIntArray(int[] array) { + breaker.addEstimateBytesAndMaybeBreak(MemoryTrackingTDigestIntArray.estimatedRamBytesUsed(array.length), "tdigest-new-int-array"); + return new MemoryTrackingTDigestIntArray(breaker, array); + } + + private static long estimatedArraySize(long arrayLength, long bytesPerElement) { + return RamUsageEstimator.alignObjectSize(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + arrayLength * bytesPerElement); + } + + private abstract static class AbstractMemoryTrackingArray implements Releasable, Accountable { + protected final CircuitBreaker breaker; + private final AtomicBoolean closed = new AtomicBoolean(false); + + AbstractMemoryTrackingArray(CircuitBreaker breaker) { + this.breaker = breaker; + } + + @Override + public final void close() { + if (closed.compareAndSet(false, true)) { + breaker.addWithoutBreaking(-ramBytesUsed()); + } + } + } + + public static class MemoryTrackingTDigestDoubleArray extends AbstractMemoryTrackingArray implements TDigestDoubleArray { + static final long SHALLOW_SIZE = RamUsageEstimator.shallowSizeOfInstance(MemoryTrackingTDigestDoubleArray.class); + + private double[] array; + private int size; + + public MemoryTrackingTDigestDoubleArray(CircuitBreaker breaker, int initialSize) { + this(breaker, new double[initialSize]); + } + + public MemoryTrackingTDigestDoubleArray(CircuitBreaker breaker, double[] array) { + super(breaker); + this.array = array; + this.size = array.length; + } + + public static long estimatedRamBytesUsed(int size) { + return SHALLOW_SIZE + estimatedArraySize(size, Double.BYTES); + } + + @Override + public long ramBytesUsed() { + return estimatedRamBytesUsed(array.length); + } + + @Override + public int size() { + return size; + } + + @Override + public double 
get(int index) { + assert index >= 0 && index < size; + return array[index]; + } + + @Override + public void set(int index, double value) { + assert index >= 0 && index < size; + array[index] = value; + } + + @Override + public void add(double value) { + ensureCapacity(size + 1); + array[size++] = value; + } + + @Override + public void sort() { + Arrays.sort(array, 0, size); + } + + @Override + public void resize(int newSize) { + ensureCapacity(newSize); + + if (newSize > size) { + Arrays.fill(array, size, newSize, 0); + } + + size = newSize; + } + + @Override + public void ensureCapacity(int requiredCapacity) { + if (requiredCapacity > array.length) { + double[] oldArray = array; + // Used for used bytes assertion + long oldRamBytesUsed = ramBytesUsed(); + long oldArraySize = RamUsageEstimator.sizeOf(oldArray); + + int newSize = ArrayUtil.oversize(requiredCapacity, Double.BYTES); + long newArraySize = estimatedArraySize(newSize, Double.BYTES); + breaker.addEstimateBytesAndMaybeBreak(newArraySize, "tdigest-new-capacity-double-array"); + array = Arrays.copyOf(array, newSize); + breaker.addWithoutBreaking(-RamUsageEstimator.sizeOf(oldArray)); + + assert ramBytesUsed() - oldRamBytesUsed == newArraySize - oldArraySize + : "ramBytesUsed() should be aligned with manual array calculations"; + } + } + } + + public static class MemoryTrackingTDigestIntArray extends AbstractMemoryTrackingArray implements TDigestIntArray { + static final long SHALLOW_SIZE = RamUsageEstimator.shallowSizeOfInstance(MemoryTrackingTDigestIntArray.class); + + private int[] array; + private int size; + + public MemoryTrackingTDigestIntArray(CircuitBreaker breaker, int initialSize) { + this(breaker, new int[initialSize]); + } + + public MemoryTrackingTDigestIntArray(CircuitBreaker breaker, int[] array) { + super(breaker); + this.array = array; + this.size = array.length; + } + + public static long estimatedRamBytesUsed(int size) { + return SHALLOW_SIZE + estimatedArraySize(size, Integer.BYTES); + } + + @Override + public long ramBytesUsed() { + return estimatedRamBytesUsed(array.length); + } + + @Override + public int size() { + return size; + } + + @Override + public int get(int index) { + assert index >= 0 && index < size; + return array[index]; + } + + @Override + public void set(int index, int value) { + assert index >= 0 && index < size; + array[index] = value; + } + + @Override + public void resize(int newSize) { + ensureCapacity(newSize); + if (newSize > size) { + Arrays.fill(array, size, newSize, 0); + } + size = newSize; + } + + private void ensureCapacity(int requiredCapacity) { + if (requiredCapacity > array.length) { + int[] oldArray = array; + // Used for used bytes assertion + long oldRamBytesUsed = ramBytesUsed(); + long oldArraySize = RamUsageEstimator.sizeOf(oldArray); + + int newSize = ArrayUtil.oversize(requiredCapacity, Integer.BYTES); + long newArraySize = estimatedArraySize(newSize, Integer.BYTES); + breaker.addEstimateBytesAndMaybeBreak(newArraySize, "tdigest-new-capacity-int-array"); + array = Arrays.copyOf(array, newSize); + breaker.addWithoutBreaking(-RamUsageEstimator.sizeOf(oldArray)); + + assert ramBytesUsed() - oldRamBytesUsed == newArraySize - oldArraySize + : "ramBytesUsed() should be aligned with manual array calculations"; + } + } + } + + public static class MemoryTrackingTDigestLongArray extends AbstractMemoryTrackingArray implements TDigestLongArray { + static final long SHALLOW_SIZE = RamUsageEstimator.shallowSizeOfInstance(MemoryTrackingTDigestLongArray.class); + + private long[] array; 
+ private int size; + + public MemoryTrackingTDigestLongArray(CircuitBreaker breaker, int initialSize) { + this(breaker, new long[initialSize]); + } + + public MemoryTrackingTDigestLongArray(CircuitBreaker breaker, long[] array) { + super(breaker); + this.array = array; + this.size = array.length; + } + + public static long estimatedRamBytesUsed(int size) { + return SHALLOW_SIZE + estimatedArraySize(size, Long.BYTES); + } + + @Override + public long ramBytesUsed() { + return estimatedRamBytesUsed(array.length); + } + + @Override + public int size() { + return size; + } + + @Override + public long get(int index) { + assert index >= 0 && index < size; + return array[index]; + } + + @Override + public void set(int index, long value) { + assert index >= 0 && index < size; + array[index] = value; + } + + @Override + public void resize(int newSize) { + ensureCapacity(newSize); + if (newSize > size) { + Arrays.fill(array, size, newSize, 0); + } + size = newSize; + } + + private void ensureCapacity(int requiredCapacity) { + if (requiredCapacity > array.length) { + long[] oldArray = array; + // Used for used bytes assertion + long oldRamBytesUsed = ramBytesUsed(); + long oldArraySize = RamUsageEstimator.sizeOf(oldArray); + + int newSize = ArrayUtil.oversize(requiredCapacity, Long.BYTES); + long newArraySize = estimatedArraySize(newSize, Long.BYTES); + breaker.addEstimateBytesAndMaybeBreak(newArraySize, "tdigest-new-capacity-long-array"); + array = Arrays.copyOf(array, newSize); + breaker.addWithoutBreaking(-RamUsageEstimator.sizeOf(oldArray)); + + assert ramBytesUsed() - oldRamBytesUsed == newArraySize - oldArraySize + : "ramBytesUsed() should be aligned with manual array calculations"; + } + } + } + + public static class MemoryTrackingTDigestByteArray extends AbstractMemoryTrackingArray implements TDigestByteArray { + static final long SHALLOW_SIZE = RamUsageEstimator.shallowSizeOfInstance(MemoryTrackingTDigestByteArray.class); + + private byte[] array; + private int size; + + public MemoryTrackingTDigestByteArray(CircuitBreaker breaker, int initialSize) { + this(breaker, new byte[initialSize]); + } + + public MemoryTrackingTDigestByteArray(CircuitBreaker breaker, byte[] array) { + super(breaker); + this.array = array; + this.size = array.length; + } + + public static long estimatedRamBytesUsed(int size) { + return SHALLOW_SIZE + estimatedArraySize(size, Byte.BYTES); + } + + @Override + public long ramBytesUsed() { + return estimatedRamBytesUsed(array.length); + } + + @Override + public int size() { + return size; + } + + @Override + public byte get(int index) { + assert index >= 0 && index < size; + return array[index]; + } + + @Override + public void set(int index, byte value) { + assert index >= 0 && index < size; + array[index] = value; + } + + @Override + public void resize(int newSize) { + ensureCapacity(newSize); + if (newSize > size) { + Arrays.fill(array, size, newSize, (byte) 0); + } + size = newSize; + } + + private void ensureCapacity(int requiredCapacity) { + if (requiredCapacity > array.length) { + byte[] oldArray = array; + // Used for used bytes assertion + long oldRamBytesUsed = ramBytesUsed(); + long oldArraySize = RamUsageEstimator.sizeOf(oldArray); + + int newSize = ArrayUtil.oversize(requiredCapacity, Byte.BYTES); + long newArraySize = estimatedArraySize(newSize, Byte.BYTES); + breaker.addEstimateBytesAndMaybeBreak(newArraySize, "tdigest-new-capacity-byte-array"); + array = Arrays.copyOf(array, newSize); + breaker.addWithoutBreaking(-RamUsageEstimator.sizeOf(oldArray)); + + 
assert ramBytesUsed() - oldRamBytesUsed == newArraySize - oldArraySize + : "ramBytesUsed() should be aligned with manual array calculations"; + } + } + } +}
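The new MemoryTrackingTDigestArrays charges the estimated size of every array it hands out to the supplied CircuitBreaker, re-charges on capacity growth, and refunds the bytes on close(). A minimal usage sketch, illustrative only and not part of the patch (a real caller would pass a request circuit breaker rather than a no-op one):

```java
import org.elasticsearch.common.breaker.NoopCircuitBreaker;
import org.elasticsearch.search.aggregations.metrics.MemoryTrackingTDigestArrays;

public class MemoryTrackingTDigestArraysExample {
    public static void main(String[] args) {
        // The no-op breaker never trips; production code would use a real breaker.
        var arrays = new MemoryTrackingTDigestArrays(new NoopCircuitBreaker("example"));
        // newDoubleArray() charges the estimated bytes to the breaker before allocating.
        try (var doubles = arrays.newDoubleArray(8)) {
            for (int i = 0; i < 100; i++) {
                doubles.add(i); // capacity growth re-charges via addEstimateBytesAndMaybeBreak()
            }
            doubles.sort();
        } // close() refunds the tracked bytes with addWithoutBreaking(-ramBytesUsed())
    }
}
```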
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestState.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestState.java index 48bdb59e430a5..78ef81684a256 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestState.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestState.java @@ -11,10 +11,11 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; import org.elasticsearch.tdigest.Centroid; import org.elasticsearch.tdigest.TDigest; import org.elasticsearch.tdigest.arrays.TDigestArrays; -import org.elasticsearch.tdigest.arrays.WrapperTDigestArrays; import java.io.IOException; import java.util.Collection; @@ -25,7 +26,7 @@ * through factory method params, providing one optimized for performance (e.g. MergingDigest or HybridDigest) by default, or optionally one * that produces highly accurate results regardless of input size but its construction over the sample population takes 2x-10x longer. */ -public class TDigestState { +public class TDigestState implements Releasable { private final double compression; @@ -54,7 +55,7 @@ static Type valueForHighAccuracy() { */ @Deprecated public static TDigestState create(double compression) { - return create(WrapperTDigestArrays.INSTANCE, compression); + return create(MemoryTrackingTDigestArrays.INSTANCE, compression); } /** @@ -81,7 +82,7 @@ public static TDigestState createOptimizedForAccuracy(TDigestArrays arrays, doub */ @Deprecated public static TDigestState create(double compression, TDigestExecutionHint executionHint) { - return create(WrapperTDigestArrays.INSTANCE, compression, executionHint); + return create(MemoryTrackingTDigestArrays.INSTANCE, compression, executionHint); } /** @@ -106,7 +107,7 @@ public static TDigestState create(TDigestArrays arrays, double compression, TDig * @return a TDigestState object */ public static TDigestState createUsingParamsFrom(TDigestState state) { - return new TDigestState(WrapperTDigestArrays.INSTANCE, state.type, state.compression); + return new TDigestState(MemoryTrackingTDigestArrays.INSTANCE, state.type, state.compression); } protected TDigestState(TDigestArrays arrays, Type type, double compression) { @@ -143,7 +144,7 @@ public static void write(TDigestState state, StreamOutput out) throws IOExceptio */ @Deprecated public static TDigestState read(StreamInput in) throws IOException { - return read(WrapperTDigestArrays.INSTANCE, in); + return read(MemoryTrackingTDigestArrays.INSTANCE, in); } public static TDigestState read(TDigestArrays arrays, StreamInput in) throws IOException { @@ -267,4 +268,9 @@ public final double getMin() { public final double getMax() { return tdigest.getMax(); } + + @Override + public void close() { + Releasables.close(tdigest); + } }
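Since TDigestState now implements Releasable, callers are expected to close it so the tracked bytes are returned to the breaker. A hedged sketch of the intended lifecycle (illustrative only; assumes the memory-tracking arrays from this patch and a no-op breaker):

```java
import org.elasticsearch.common.breaker.NoopCircuitBreaker;
import org.elasticsearch.search.aggregations.metrics.MemoryTrackingTDigestArrays;
import org.elasticsearch.search.aggregations.metrics.TDigestState;

public class TDigestStateLifecycleExample {
    public static void main(String[] args) {
        var arrays = new MemoryTrackingTDigestArrays(new NoopCircuitBreaker("example"));
        // try-with-resources works because Releasable extends Closeable.
        try (TDigestState digest = TDigestState.create(arrays, 100.0)) {
            for (int i = 1; i <= 1000; i++) {
                digest.add(i);
            }
            System.out.println(digest.quantile(0.5)); // median, roughly 500
        } // close() releases the underlying t-digest arrays via Releasables.close(tdigest)
    }
}
```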
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovAvgPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovAvgPipelineAggregationBuilder.java index d163cafaffe2e..8326342df09f2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovAvgPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovAvgPipelineAggregationBuilder.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.query.CommonTermsQueryBuilder; import org.elasticsearch.xcontent.ContextParser; import org.elasticsearch.xcontent.ParseField; @@ -32,6 +33,7 @@ * * @deprecated Only for 7.x rest compat */ +@UpdateForV9 // remove this since it's only for 7.x compat and 7.x compat will be removed in 9.0 @Deprecated public class MovAvgPipelineAggregationBuilder extends AbstractPipelineAggregationBuilder<MovAvgPipelineAggregationBuilder> { private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(CommonTermsQueryBuilder.class); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/StoredFieldsPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/StoredFieldsPhase.java index b3211f0b1e31c..17b57645d7d5f 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/StoredFieldsPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/StoredFieldsPhase.java @@ -27,6 +27,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.stream.Collectors; /** * Process stored fields loaded from a HitContext into DocumentFields @@ -42,7 +43,8 @@ List<Object> process(Map<String, List<Object>> loadedFields) { if (inputs == null) { return List.of(); } - return inputs.stream().map(ft::valueForDisplay).toList(); + // This is eventually provided to DocumentField, which needs this collection to be mutable + return inputs.stream().map(ft::valueForDisplay).collect(Collectors.toList()); } boolean hasValue(Map<String, List<Object>> loadedFields) { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightField.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightField.java index 7aa4126cf9b35..9516465309c39 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightField.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightField.java @@ -9,23 +9,17 @@ package org.elasticsearch.search.fetch.subphase.highlight; -import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.text.Text; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; -import java.util.ArrayList; import java.util.Arrays; -import java.util.List; import java.util.Objects; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; - /** * A field highlighted with its highlighted fragments. */ @@ -74,25 +68,6 @@ public void writeTo(StreamOutput out) throws IOException { } } - public static HighlightField fromXContent(XContentParser parser) throws IOException { - ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser); - String fieldName = parser.currentName(); - Text[] fragments; - XContentParser.Token token = parser.nextToken(); - if (token == XContentParser.Token.START_ARRAY) { - List<Text> values = new ArrayList<>(); - while (parser.nextToken() != XContentParser.Token.END_ARRAY) { - values.add(new Text(parser.text())); - } - fragments = values.toArray(Text.EMPTY_ARRAY); - } else if (token == XContentParser.Token.VALUE_NULL) { - fragments = null; - } else { - throw new ParsingException(parser.getTokenLocation(), "unexpected token type [" + token + "]"); - } - return new HighlightField(fieldName, fragments); - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.field(name); diff --git a/server/src/main/java/org/elasticsearch/search/profile/ProfileResult.java b/server/src/main/java/org/elasticsearch/search/profile/ProfileResult.java index c8af8671ec232..34049fc027ff0 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/ProfileResult.java +++ b/server/src/main/java/org/elasticsearch/search/profile/ProfileResult.java @@ -14,11 +14,9 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.InstantiatingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.Collections; @@ -27,22 +25,18 @@ import java.util.Map; import java.util.Objects; import java.util.concurrent.TimeUnit; -import static java.util.stream.Collectors.toMap; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - /** * The result of a profiled *thing*, like a query or an aggregation. See * {@link AbstractProfiler} for the statistic collection framework.
*/ public final class ProfileResult implements Writeable, ToXContentObject { - static final ParseField TYPE = new ParseField("type"); - static final ParseField DESCRIPTION = new ParseField("description"); - static final ParseField BREAKDOWN = new ParseField("breakdown"); - static final ParseField DEBUG = new ParseField("debug"); + public static final ParseField TYPE = new ParseField("type"); + public static final ParseField DESCRIPTION = new ParseField("description"); + public static final ParseField BREAKDOWN = new ParseField("breakdown"); + public static final ParseField DEBUG = new ParseField("debug"); static final ParseField NODE_TIME = new ParseField("time"); - static final ParseField NODE_TIME_RAW = new ParseField("time_in_nanos"); - static final ParseField CHILDREN = new ParseField("children"); + public static final ParseField NODE_TIME_RAW = new ParseField("time_in_nanos"); + public static final ParseField CHILDREN = new ParseField("children"); private final String type; private final String description; @@ -181,28 +175,4 @@ public int hashCode() { public String toString() { return Strings.toString(this); } - - private static final InstantiatingObjectParser<ProfileResult, Void> PARSER; - static { - InstantiatingObjectParser.Builder<ProfileResult, Void> parser = InstantiatingObjectParser.builder( - "profile_result", - true, - ProfileResult.class - ); - parser.declareString(constructorArg(), TYPE); - parser.declareString(constructorArg(), DESCRIPTION); - parser.declareObject( - constructorArg(), - (p, c) -> p.map().entrySet().stream().collect(toMap(Map.Entry::getKey, e -> ((Number) e.getValue()).longValue())), - BREAKDOWN - ); - parser.declareObject(optionalConstructorArg(), (p, c) -> p.map(), DEBUG); - parser.declareLong(constructorArg(), NODE_TIME_RAW); - parser.declareObjectArray(optionalConstructorArg(), (p, c) -> fromXContent(p), CHILDREN); - PARSER = parser.build(); - } - - public static ProfileResult fromXContent(XContentParser p) throws IOException { - return PARSER.parse(p, null); - } } diff --git a/server/src/main/java/org/elasticsearch/search/profile/SearchProfileResults.java b/server/src/main/java/org/elasticsearch/search/profile/SearchProfileResults.java index 5c4c7d2ea5574..8227cb5674809 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/SearchProfileResults.java +++ b/server/src/main/java/org/elasticsearch/search/profile/SearchProfileResults.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.Nullable; +import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; @@ -143,15 +144,10 @@ static ShardProfileId parseCompositeProfileShardId(String compositeId) { Matcher m = SHARD_ID_DECOMPOSITION.matcher(compositeId); if (m.find()) { String nodeId = m.group(1); - String indexName = m.group(2); + String[] tokens = RemoteClusterAware.splitIndexName(m.group(2)); + String cluster = tokens[0]; + String indexName = tokens[1]; int shardId = Integer.parseInt(m.group(3)); - String cluster = null; - if (indexName.contains(":")) { - // index names and cluster names cannot contain a ':', so this split should be accurate - String[] tokens = indexName.split(":", 2); - cluster = tokens[0]; - indexName = tokens[1]; - } return new ShardProfileId(nodeId, indexName, shardId, cluster); } else { assert false : "Unable to match input against expected pattern of [nodeId][indexName][shardId].
Input: " + compositeId; diff --git a/server/src/main/java/org/elasticsearch/search/profile/aggregation/AggregationProfileShardResult.java b/server/src/main/java/org/elasticsearch/search/profile/aggregation/AggregationProfileShardResult.java index 5223cf969ee04..9d309d34e34eb 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/aggregation/AggregationProfileShardResult.java +++ b/server/src/main/java/org/elasticsearch/search/profile/aggregation/AggregationProfileShardResult.java @@ -16,15 +16,12 @@ import org.elasticsearch.search.profile.ProfileResult; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; - /** * A container class to hold the profile results for a single shard in the request. * Contains a list of query profiles, a collector tree and a total rewrite tree. @@ -87,13 +84,4 @@ public String toString() { return Strings.toString(this); } - public static AggregationProfileShardResult fromXContent(XContentParser parser) throws IOException { - XContentParser.Token token = parser.currentToken(); - ensureExpectedToken(XContentParser.Token.START_ARRAY, token, parser); - List aggProfileResults = new ArrayList<>(); - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - aggProfileResults.add(ProfileResult.fromXContent(parser)); - } - return new AggregationProfileShardResult(aggProfileResults); - } } diff --git a/server/src/main/java/org/elasticsearch/search/profile/query/CollectorResult.java b/server/src/main/java/org/elasticsearch/search/profile/query/CollectorResult.java index 9b30b7e16b25d..637b2dbe0ba49 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/query/CollectorResult.java +++ b/server/src/main/java/org/elasticsearch/search/profile/query/CollectorResult.java @@ -19,7 +19,6 @@ import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; @@ -27,8 +26,6 @@ import java.util.Objects; import java.util.concurrent.TimeUnit; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; - /** * Public interface and serialization container for profiled timings of the * Collectors used in the search. 
Children CollectorResult's may be @@ -43,11 +40,11 @@ public class CollectorResult extends ProfilerCollectorResult implements ToXConte public static final String REASON_AGGREGATION = "aggregation"; public static final String REASON_AGGREGATION_GLOBAL = "aggregation_global"; - private static final ParseField NAME = new ParseField("name"); - private static final ParseField REASON = new ParseField("reason"); - private static final ParseField TIME = new ParseField("time"); - private static final ParseField TIME_NANOS = new ParseField("time_in_nanos"); - private static final ParseField CHILDREN = new ParseField("children"); + public static final ParseField NAME = new ParseField("name"); + public static final ParseField REASON = new ParseField("reason"); + public static final ParseField TIME = new ParseField("time"); + public static final ParseField TIME_NANOS = new ParseField("time_in_nanos"); + public static final ParseField CHILDREN = new ParseField("children"); public CollectorResult(String collectorName, String reason, long time, List<CollectorResult> children) { super(collectorName, reason, time, new ArrayList<>(children)); @@ -119,41 +116,4 @@ public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params par return builder; } - public static CollectorResult fromXContent(XContentParser parser) throws IOException { - XContentParser.Token token = parser.currentToken(); - ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser); - String currentFieldName = null; - String name = null, reason = null; - long time = -1; - List<CollectorResult> children = new ArrayList<>(); - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (token.isValue()) { - if (NAME.match(currentFieldName, parser.getDeprecationHandler())) { - name = parser.text(); - } else if (REASON.match(currentFieldName, parser.getDeprecationHandler())) { - reason = parser.text(); - } else if (TIME.match(currentFieldName, parser.getDeprecationHandler())) { - // we need to consume this value, but we use the raw nanosecond value - parser.text(); - } else if (TIME_NANOS.match(currentFieldName, parser.getDeprecationHandler())) { - time = parser.longValue(); - } else { - parser.skipChildren(); - } - } else if (token == XContentParser.Token.START_ARRAY) { - if (CHILDREN.match(currentFieldName, parser.getDeprecationHandler())) { - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - children.add(CollectorResult.fromXContent(parser)); - } - } else { - parser.skipChildren(); - } - } else { - parser.skipChildren(); - } - } - return new CollectorResult(name, reason, time, children); - } } diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java index 76b93a2f802ec..0d6b2cf45138b 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java @@ -53,6 +53,45 @@ protected static Set<String> getEnabledRemoteClusters(final Settings settings) { return RemoteConnectionStrategy.getRemoteClusters(settings); } + /** + * Check whether the index expression represents a remote index or not. + * The index name is assumed to be an individual index (no commas) but can contain `-`, wildcards, + * date math, a remote cluster name and any other syntax permissible in an index expression component.
+ */ + public static boolean isRemoteIndexName(String indexExpression) { + if (indexExpression.isEmpty() || indexExpression.charAt(0) == '<' || indexExpression.startsWith("-<")) { + // This is date math, but even if it is not, the remote can't start with '<'. + // Thus, whatever it is, this is definitely not a remote index. + return false; + } + // Note that a remote index name also cannot start with ':' + return indexExpression.indexOf(RemoteClusterService.REMOTE_CLUSTER_INDEX_SEPARATOR) > 0; + } + + /** + * Split the index name into the remote cluster alias and the index name. + * The index expression is assumed to be an individual index (no commas) but can contain `-`, wildcards, + * date math, a remote cluster name and any other syntax permissible in an index expression component. + * There's no guarantee that the components actually represent an existing remote cluster or index; only + * rudimentary syntax checks are done. + */ + public static String[] splitIndexName(String indexExpression) { + if (indexExpression.isEmpty() || indexExpression.charAt(0) == '<' || indexExpression.startsWith("-<")) { + // This is date math, but even if it is not, the remote can't start with '<'. + // Thus, whatever it is, this is definitely not a remote index. + return new String[] { null, indexExpression }; + } + int i = indexExpression.indexOf(RemoteClusterService.REMOTE_CLUSTER_INDEX_SEPARATOR); + if (i == 0) { + throw new IllegalArgumentException("index name [" + indexExpression + "] is invalid because the remote part is empty"); + } + if (i < 0) { + return new String[] { null, indexExpression }; + } else { + return new String[] { indexExpression.substring(0, i), indexExpression.substring(i + 1) }; + } + } + /** * Groups indices per cluster by splitting remote cluster-alias, index-name pairs on {@link #REMOTE_CLUSTER_INDEX_SEPARATOR}. All * indices per cluster are collected as a list in the returned map keyed by the cluster alias. Local indices are grouped under @@ -77,18 +116,20 @@ protected Map<String, List<String>> groupClusterIndices(Set<String> remoteCluste for (String index : requestIndices) { // ensure that `index` is a remote name and not a datemath expression which includes ':' symbol // Remote names can not start with '<' so we are assuming that if the first character is '<' then it is a datemath expression. - boolean isDateMathExpression = (index.charAt(0) == '<' || index.startsWith("-<")); - int i = index.indexOf(RemoteClusterService.REMOTE_CLUSTER_INDEX_SEPARATOR); - if (isDateMathExpression == false && i >= 0) { + String[] split = splitIndexName(index); + if (split[0] != null) { if (isRemoteClusterClientEnabled == false) { assert remoteClusterNames.isEmpty() : remoteClusterNames; throw new IllegalArgumentException("node [" + nodeName + "] does not have the remote cluster client role enabled"); } - int startIdx = index.charAt(0) == '-' ? 1 : 0; - String remoteClusterName = index.substring(startIdx, i); - List<String> clusters = ClusterNameExpressionResolver.resolveClusterNames(remoteClusterNames, remoteClusterName); - String indexName = index.substring(i + 1); - if (startIdx == 1) { + String remoteClusterName = split[0]; + String indexName = split[1]; + boolean isNegative = remoteClusterName.startsWith("-"); + List<String> clusters = ClusterNameExpressionResolver.resolveClusterNames( + remoteClusterNames, + isNegative ? remoteClusterName.substring(1) : remoteClusterName + ); + if (isNegative) { if (indexName.equals("*") == false) { throw new IllegalArgumentException( Strings.format(
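splitIndexName centralizes the alias/index split that callers such as SearchProfileResults previously hand-rolled. A quick sketch of its contract as added above (illustrative only; the alias "my_remote" is made up):

```java
import org.elasticsearch.transport.RemoteClusterAware;

public class SplitIndexNameExample {
    public static void main(String[] args) {
        // A remote expression splits into { clusterAlias, indexName }.
        String[] remote = RemoteClusterAware.splitIndexName("my_remote:logs-*");
        assert "my_remote".equals(remote[0]) && "logs-*".equals(remote[1]);

        // A local expression yields a null alias.
        assert RemoteClusterAware.splitIndexName("logs-*")[0] == null;

        // Date math contains ':' but is never treated as remote.
        assert RemoteClusterAware.splitIndexName("<logs-{now/d}>")[0] == null;

        // A leading separator, as in ":logs", throws IllegalArgumentException.
    }
}
```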
diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterAwareClient.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterAwareClient.java index 126393b688d5d..c21cd4dd2f714 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterAwareClient.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterAwareClient.java @@ -12,8 +12,9 @@ import org.elasticsearch.action.ActionListenerResponseHandler; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.RemoteClusterActionType; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.client.internal.RemoteClusterClient; -import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.core.Nullable; import java.util.concurrent.Executor; @@ -35,41 +36,48 @@ final class RemoteClusterAwareClient implements RemoteClusterClient { @Override public <Request extends ActionRequest, Response extends TransportResponse> void execute( + Transport.Connection connection, RemoteClusterActionType<Response> action, Request request, ActionListener<Response> listener ) { - maybeEnsureConnected(listener.delegateFailureAndWrap((delegateListener, v) -> { - final Transport.Connection connection; - try { - if (request instanceof RemoteClusterAwareRequest) { - DiscoveryNode preferredTargetNode = ((RemoteClusterAwareRequest) request).getPreferredTargetNode(); - connection = remoteClusterService.getConnection(preferredTargetNode, clusterAlias); + service.sendRequest( + connection, + action.name(), + request, + TransportRequestOptions.EMPTY, + new ActionListenerResponseHandler<>(listener, action.getResponseReader(), responseExecutor) + ); + } + + @Override + public <Request extends ActionRequest> void getConnection(@Nullable Request request, ActionListener<Transport.Connection> listener) { + SubscribableListener + + .newForked(ensureConnectedListener -> { + if (ensureConnected) { + remoteClusterService.ensureConnected(clusterAlias, ensureConnectedListener); } else { - connection = remoteClusterService.getConnection(clusterAlias); + ensureConnectedListener.onResponse(null); } - } catch (ConnectTransportException e) { - if (ensureConnected == false) { - // trigger another connection attempt, but don't wait for it to complete - remoteClusterService.ensureConnected(clusterAlias, ActionListener.noop()); + }) + + .andThenApply(ignored -> { + try { + if (request instanceof RemoteClusterAwareRequest remoteClusterAwareRequest) { + return remoteClusterService.getConnection(remoteClusterAwareRequest.getPreferredTargetNode(), clusterAlias); + } else { + return remoteClusterService.getConnection(clusterAlias); + } + } catch (ConnectTransportException e) { + if (ensureConnected == false) { + // trigger another connection attempt, but don't wait for it to complete + remoteClusterService.ensureConnected(clusterAlias, ActionListener.noop()); + } + throw e; } - throw e; - } - service.sendRequest( - connection, - action.name(), - request, - TransportRequestOptions.EMPTY, - new ActionListenerResponseHandler<>(delegateListener, action.getResponseReader(), responseExecutor) - ); - })); - } + }) - private void maybeEnsureConnected(ActionListener<Void> ensureConnectedListener) { - if (ensureConnected) { - ActionListener.run(ensureConnectedListener, l -> remoteClusterService.ensureConnected(clusterAlias, l)); - } else { - ensureConnectedListener.onResponse(null); - } + .addListener(listener); } } diff --git 
a/server/src/main/java/org/elasticsearch/upgrades/SystemIndexMigrationInfo.java b/server/src/main/java/org/elasticsearch/upgrades/SystemIndexMigrationInfo.java index f8adead63ea32..a0e9461558b6b 100644 --- a/server/src/main/java/org/elasticsearch/upgrades/SystemIndexMigrationInfo.java +++ b/server/src/main/java/org/elasticsearch/upgrades/SystemIndexMigrationInfo.java @@ -240,7 +240,7 @@ private static Settings copySettingsForNewIndex(Settings currentIndexSettings, I /** * Convenience factory method holding the logic for creating instances from a Feature object. * @param feature The feature that - * @param metadata The current metadata, as index migration depends on the current state of the clsuter. + * @param metadata The current metadata, as index migration depends on the current state of the cluster. * @param indexScopedSettings This is necessary to make adjustments to the indices settings for unmanaged indices. * @return A {@link Stream} of {@link SystemIndexMigrationInfo}s that represent all the indices the given feature currently owns. */ diff --git a/server/src/main/resources/org/elasticsearch/TransportVersions.csv b/server/src/main/resources/org/elasticsearch/TransportVersions.csv index 19528a9719e22..44c752def351e 100644 --- a/server/src/main/resources/org/elasticsearch/TransportVersions.csv +++ b/server/src/main/resources/org/elasticsearch/TransportVersions.csv @@ -129,3 +129,4 @@ 8.14.3,8636001 8.15.0,8702002 8.15.1,8702002 +8.15.2,8702003 diff --git a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv index 2e684719688f9..971940041f9b1 100644 --- a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv +++ b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv @@ -129,3 +129,4 @@ 8.14.3,8505000 8.15.0,8512000 8.15.1,8512000 +8.15.2,8512000 diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/TransportResolveClusterActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/TransportResolveClusterActionTests.java index dc66bae52dc2a..2c618f19a3c75 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/TransportResolveClusterActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/TransportResolveClusterActionTests.java @@ -88,9 +88,10 @@ public void writeTo(StreamOutput out) throws IOException { null ); - final var ex = asInstanceOf( + final var ex = safeAwaitFailure( IllegalArgumentException.class, - safeAwaitFailure(ResolveClusterActionResponse.class, listener -> action.doExecute(null, request, listener)) + ResolveClusterActionResponse.class, + listener -> action.doExecute(null, request, listener) ); assertThat(ex.getMessage(), containsString("not compatible with version")); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java index 1f0e3c52d55bb..a7fa81eb24a57 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java @@ -21,7 +21,6 @@ import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.core.RestApiVersion; import 
org.elasticsearch.core.TimeValue; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.indices.IndicesModule; @@ -30,9 +29,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParseException; -import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xcontent.json.JsonXContent; import org.junit.Before; import java.io.IOException; @@ -276,72 +273,4 @@ public void testValidation() { ); } } - - public void testParsingWithType() throws Exception { - final XContentBuilder builder = XContentFactory.jsonBuilder() - .startObject() - .startObject("conditions") - .field("max_age", "10d") - .field("max_docs", 100) - .endObject() - .startObject("mappings") - .startObject("type1") - .startObject("properties") - .startObject("field1") - .field("type", "string") - .field("index", "not_analyzed") - .endObject() - .endObject() - .endObject() - .endObject() - .startObject("settings") - .field("number_of_shards", 10) - .endObject() - .startObject("aliases") - .startObject("alias1") - .endObject() - .endObject() - .endObject(); - - try ( - XContentParser parser = createParserWithCompatibilityFor( - JsonXContent.jsonXContent, - BytesReference.bytes(builder).utf8ToString(), - RestApiVersion.V_7 - ) - ) { - final RolloverRequest request = new RolloverRequest(randomAlphaOfLength(10), randomAlphaOfLength(10)); - request.fromXContent(true, parser); - Map<String, Condition<?>> conditions = request.getConditions().getConditions(); - assertThat(conditions.size(), equalTo(2)); - assertThat(request.getCreateIndexRequest().mappings(), equalTo(""" - {"_doc":{"properties":{"field1":{"index":"not_analyzed","type":"string"}}}}""")); - } - } - - public void testTypedRequestWithoutIncludeTypeName() throws IOException { - final XContentBuilder builder = XContentFactory.jsonBuilder() - .startObject() - .startObject("mappings") - .startObject("_doc") - .startObject("properties") - .startObject("field1") - .field("type", "string") - .field("index", "not_analyzed") - .endObject() - .endObject() - .endObject() - .endObject() - .endObject(); - try ( - XContentParser parser = createParserWithCompatibilityFor( - JsonXContent.jsonXContent, - BytesReference.bytes(builder).utf8ToString(), - RestApiVersion.V_7 - ) - ) { - final RolloverRequest request = new RolloverRequest(randomAlphaOfLength(10), randomAlphaOfLength(10)); - expectThrows(IllegalArgumentException.class, () -> request.fromXContent(false, parser)); - } - } } diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkOperationTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkOperationTests.java index 3be942bcd291e..b87dfd07181dc 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkOperationTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkOperationTests.java @@ -42,7 +42,6 @@ import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.mapper.MapperException; import org.elasticsearch.index.shard.ShardId; @@ -1022,7 +1021,6 @@ private BulkOperation newBulkOperation(NodeClient client, BulkRequest request, A client, request, new AtomicArray<>(request.numberOfActions()), - Map.of(), 
mockObserver(DEFAULT_STATE), listener, new FailureStoreDocumentConverter() @@ -1040,7 +1038,6 @@ private BulkOperation newBulkOperation( client, request, new AtomicArray<>(request.numberOfActions()), - Map.of(), mockObserver(DEFAULT_STATE), listener, failureStoreDocumentConverter @@ -1059,7 +1056,6 @@ private BulkOperation newBulkOperation( client, request, new AtomicArray<>(request.numberOfActions()), - Map.of(), observer, listener, new FailureStoreDocumentConverter() @@ -1071,7 +1067,6 @@ private BulkOperation newBulkOperation( NodeClient client, BulkRequest request, AtomicArray<BulkItemResponse> existingResponses, - Map<String, IndexNotFoundException> indicesThatCanNotBeCreated, ClusterStateObserver observer, ActionListener<BulkResponse> listener, FailureStoreDocumentConverter failureStoreDocumentConverter @@ -1100,7 +1095,6 @@ private BulkOperation newBulkOperation( request, client, existingResponses, - indicesThatCanNotBeCreated, indexNameExpressionResolver, () -> endTime, timeZero, diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java deleted file mode 100644 index 2f5e6b22e1a8c..0000000000000 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java +++ /dev/null @@ -1,164 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ - -package org.elasticsearch.action.bulk; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; -import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; -import org.elasticsearch.action.delete.DeleteRequest; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.ActionTestUtils; -import org.elasticsearch.action.update.UpdateRequest; -import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.block.ClusterBlocks; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.metadata.Metadata; -import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.AtomicArray; -import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.features.FeatureService; -import org.elasticsearch.index.IndexNotFoundException; -import org.elasticsearch.index.IndexingPressure; -import org.elasticsearch.index.VersionType; -import org.elasticsearch.indices.EmptySystemIndices; -import org.elasticsearch.tasks.Task; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.MockUtils; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; - -import java.util.Map; -import java.util.Set; -import java.util.concurrent.Executor; -import java.util.function.Consumer; -import java.util.function.Function; - -import static java.util.Collections.emptySet; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -public class TransportBulkActionIndicesThatCannotBeCreatedTests extends ESTestCase { - private static final Consumer<String> noop = index -> {}; - - public void testNonExceptional() { - BulkRequest bulkRequest = new BulkRequest(); - bulkRequest.add(new IndexRequest(randomAlphaOfLength(5))); - bulkRequest.add(new IndexRequest(randomAlphaOfLength(5))); - bulkRequest.add(new DeleteRequest(randomAlphaOfLength(5))); - bulkRequest.add(new UpdateRequest(randomAlphaOfLength(5), randomAlphaOfLength(5))); - // Test emulating that index can be auto-created - indicesThatCannotBeCreatedTestCase(emptySet(), bulkRequest, index -> true, noop); - // Test emulating that index cannot be auto-created - indicesThatCannotBeCreatedTestCase(emptySet(), bulkRequest, index -> false, noop); - // Test emulating auto_create_index=true with some indices already created. 
- indicesThatCannotBeCreatedTestCase(emptySet(), bulkRequest, index -> randomBoolean(), noop); - } - - public void testAllFail() { - BulkRequest bulkRequest = new BulkRequest(); - bulkRequest.add(new IndexRequest("no")); - bulkRequest.add(new IndexRequest("can't")); - bulkRequest.add(new DeleteRequest("do").version(0).versionType(VersionType.EXTERNAL)); - bulkRequest.add(new UpdateRequest("nothin", randomAlphaOfLength(5))); - indicesThatCannotBeCreatedTestCase(Set.of("no", "can't", "do", "nothin"), bulkRequest, index -> true, index -> { - throw new IndexNotFoundException("Can't make it because I say so"); - }); - } - - public void testSomeFail() { - BulkRequest bulkRequest = new BulkRequest(); - bulkRequest.add(new IndexRequest("ok")); - bulkRequest.add(new IndexRequest("bad")); - // Emulate auto_create_index=-bad,+* - indicesThatCannotBeCreatedTestCase(Set.of("bad"), bulkRequest, index -> true, index -> { - if (index.equals("bad")) { - throw new IndexNotFoundException("Can't make it because I say so"); - } - }); - } - - private void indicesThatCannotBeCreatedTestCase( - Set<String> expected, - BulkRequest bulkRequest, - Function<String, Boolean> shouldAutoCreate, - Consumer<String> simulateAutoCreate - ) { - ClusterService clusterService = mock(ClusterService.class); - ClusterState state = mock(ClusterState.class); - when(state.getMetadata()).thenReturn(Metadata.EMPTY_METADATA); - when(state.metadata()).thenReturn(Metadata.EMPTY_METADATA); - when(state.blocks()).thenReturn(mock(ClusterBlocks.class)); - when(clusterService.state()).thenReturn(state); - when(clusterService.getSettings()).thenReturn(Settings.EMPTY); - - DiscoveryNode localNode = mock(DiscoveryNode.class); - when(clusterService.localNode()).thenReturn(localNode); - when(localNode.isIngestNode()).thenReturn(randomBoolean()); - - final IndexNameExpressionResolver indexNameExpressionResolver = new IndexNameExpressionResolver( - new ThreadContext(Settings.EMPTY), - EmptySystemIndices.INSTANCE - ) { - @Override - public boolean hasIndexAbstraction(String indexAbstraction, ClusterState state) { - return shouldAutoCreate.apply(indexAbstraction) == false; - } - }; - - final ThreadPool threadPool = mock(ThreadPool.class); - TransportService transportService = MockUtils.setupTransportServiceWithThreadpoolExecutor(threadPool); - FeatureService mockFeatureService = mock(FeatureService.class); - when(mockFeatureService.clusterHasFeature(any(), any())).thenReturn(true); - TransportBulkAction action = new TransportBulkAction( - threadPool, - transportService, - clusterService, - null, - mockFeatureService, - new NodeClient(Settings.EMPTY, threadPool), - mock(ActionFilters.class), - indexNameExpressionResolver, - new IndexingPressure(Settings.EMPTY), - EmptySystemIndices.INSTANCE, - FailureStoreMetrics.NOOP - ) { - @Override - void executeBulk( - Task task, - BulkRequest bulkRequest, - long startTimeNanos, - ActionListener<BulkResponse> listener, - Executor executor, - AtomicArray<BulkItemResponse> responses, - Map<String, IndexNotFoundException> indicesThatCannotBeCreated - ) { - assertEquals(expected, indicesThatCannotBeCreated.keySet()); - } - - @Override - void createIndex(CreateIndexRequest createIndexRequest, ActionListener<CreateIndexResponse> listener) { - String index = createIndexRequest.index(); - try { - simulateAutoCreate.accept(index); - // If we try to create an index just immediately assume it worked - listener.onResponse(new CreateIndexResponse(true, true, index)); - } catch (Exception e) { - listener.onFailure(e); - } - } - }; - action.doExecute(null, bulkRequest, ActionTestUtils.assertNoFailureListener(bulkItemResponse -> {})); - } -} 
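The deleted test covered the indicesThatCannotBeCreated bookkeeping that no longer exists: with this change a failed prerequisite (index creation, or the rollovers exercised in TransportBulkActionTests below) surfaces as a per-item failure on the BulkResponse instead. A hedged sketch of how a caller observes such failures now (hypothetical helper, not part of the patch):

```java
import org.elasticsearch.action.bulk.BulkItemResponse;
import org.elasticsearch.action.bulk.BulkResponse;

public class BulkFailureInspectionExample {
    // Prerequisite failures arrive as failed items rather than a separate map.
    static void logFailures(BulkResponse response) {
        for (BulkItemResponse item : response.getItems()) {
            if (item.isFailed()) {
                // item.getFailure() exposes the message and cause of the original
                // exception, e.g. an IndexNotFoundException from index creation.
                System.err.println(item.getFailure().getMessage());
            }
        }
    }
}
```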
diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java index abdb02924e26c..112748cd73627 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java @@ -39,7 +39,6 @@ import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.core.Nullable; import org.elasticsearch.features.FeatureService; -import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexingPressure; @@ -168,8 +167,7 @@ void executeBulk( long startTimeNanos, ActionListener<BulkResponse> listener, Executor executor, - AtomicArray<BulkItemResponse> responses, - Map<String, IndexNotFoundException> indicesThatCannotBeCreated + AtomicArray<BulkItemResponse> responses ) { assertTrue(indexCreated); isExecuted = true; diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java index 5d55e22a080cd..eae6fbf9cbc7f 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java @@ -15,6 +15,8 @@ import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; +import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; +import org.elasticsearch.action.admin.indices.rollover.RolloverResponse; import org.elasticsearch.action.bulk.TransportBulkActionTookTests.Resolver; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequest; @@ -39,6 +41,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.features.FeatureService; +import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; @@ -90,7 +93,9 @@ public class TransportBulkActionTests extends ESTestCase { class TestTransportBulkAction extends TransportBulkAction { - volatile boolean failIndexCreation = false; + volatile Exception failIndexCreationException; + volatile Exception failDataStreamRolloverException; + volatile Exception failFailureStoreRolloverException; boolean indexCreated = false; // set when the "real" index is created Runnable beforeIndexCreation = null; @@ -116,12 +121,25 @@ void createIndex(CreateIndexRequest createIndexRequest, ActionListener<CreateIndexResponse> listener) { - if (failIndexCreation) { - listener.onFailure(new ResourceAlreadyExistsException("index already exists")); + if (failIndexCreationException != null) { + listener.onFailure(failIndexCreationException); } else { listener.onResponse(null); } } + + @Override + void rollOver(RolloverRequest rolloverRequest, ActionListener<RolloverResponse> listener) { + if (failDataStreamRolloverException != null && rolloverRequest.targetsFailureStore() == false) { + listener.onFailure(failDataStreamRolloverException); + } else if (failFailureStoreRolloverException != null && rolloverRequest.targetsFailureStore()) { + listener.onFailure(failFailureStoreRolloverException); + } else { + listener.onResponse( + new RolloverResponse(null, null, Map.of(), rolloverRequest.isDryRun(), true, true, true, rolloverRequest.isLazy()) + ); + } + } } @Before @@ -357,7 +375,7 @@ public void testRejectCoordination() { public void testRejectionAfterCreateIndexIsPropagated() { BulkRequest bulkRequest = new BulkRequest().add(new 
IndexRequest("index").id("id").source(Collections.emptyMap())); - bulkAction.failIndexCreation = randomBoolean(); + bulkAction.failIndexCreationException = randomBoolean() ? new ResourceAlreadyExistsException("index already exists") : null; final var blockingLatch = new CountDownLatch(1); try { bulkAction.beforeIndexCreation = () -> blockWriteThreadPool(blockingLatch); @@ -467,6 +485,76 @@ public void testResolveFailureStoreFromTemplate() throws Exception { assertThat(TransportBulkAction.resolveFailureInternal(indexTemplate + "-1", metadata, testTime), is(nullValue())); } + /** + * This test asserts that any failing prerequisite action that fails (i.e. index creation or data stream/failure store rollover) + * results in a failed response. + */ + public void testFailuresDuringPrerequisiteActions() throws InterruptedException { + // One request for testing a failure during index creation. + BulkRequest bulkRequest = new BulkRequest().add(new IndexRequest("index").source(Map.of())) + // One request for testing a failure during data stream rollover. + .add(new IndexRequest("data-stream").source(Map.of())) + // One request for testing a failure during failure store rollover. + .add(new IndexRequest("failure-store").source(Map.of()).setWriteToFailureStore(true)); + + // Construct a cluster state that contains the required data streams. + var state = clusterService.state() + .copyAndUpdateMetadata( + builder -> builder.put(indexMetadata(".ds-data-stream-01")) + .put(indexMetadata(".ds-failure-store-01")) + .put(indexMetadata(".fs-failure-store-01")) + .put( + DataStream.builder( + "data-stream", + DataStream.DataStreamIndices.backingIndicesBuilder(List.of(new Index(".ds-data-stream-01", randomUUID()))) + .setRolloverOnWrite(true) + .build() + ).build() + ) + .put( + DataStream.builder("failure-store", List.of(new Index(".ds-failure-store-01", randomUUID()))) + .setFailureIndices( + DataStream.DataStreamIndices.failureIndicesBuilder(List.of(new Index(".fs-failure-store-01", randomUUID()))) + .setRolloverOnWrite(true) + .build() + ) + .build() + ) + ); + + // Apply the cluster state. + CountDownLatch latch = new CountDownLatch(1); + clusterService.getClusterApplierService().onNewClusterState("set-state", () -> state, ActionListener.running(latch::countDown)); + // And wait for it to be applied. + latch.await(10L, TimeUnit.SECONDS); + + // Set the exceptions that the transport action should encounter. + bulkAction.failIndexCreationException = new IndexNotFoundException("index"); + bulkAction.failDataStreamRolloverException = new RuntimeException("data-stream-rollover-exception"); + bulkAction.failFailureStoreRolloverException = new RuntimeException("failure-store-rollover-exception"); + + // Execute the action and get the response. 
+ PlainActionFuture future = new PlainActionFuture<>(); + ActionTestUtils.execute(bulkAction, null, bulkRequest, future); + BulkResponse response = future.actionGet(); + assertEquals(3, response.getItems().length); + + var indexFailure = response.getItems()[0]; + assertTrue(indexFailure.isFailed()); + assertTrue(indexFailure.getFailure().getCause() instanceof IndexNotFoundException); + assertNull(bulkRequest.requests.get(0)); + + var dataStreamFailure = response.getItems()[1]; + assertTrue(dataStreamFailure.isFailed()); + assertEquals("data-stream-rollover-exception", dataStreamFailure.getFailure().getCause().getMessage()); + assertNull(bulkRequest.requests.get(1)); + + var failureStoreFailure = response.getItems()[2]; + assertTrue(failureStoreFailure.isFailed()); + assertEquals("failure-store-rollover-exception", failureStoreFailure.getFailure().getCause().getMessage()); + assertNull(bulkRequest.requests.get(2)); + } + private BulkRequest buildBulkRequest(List indices) { BulkRequest request = new BulkRequest(); for (String index : indices) { @@ -488,4 +576,8 @@ private BulkRequest buildBulkStreamRequest(List indices) throws IOExcept StreamInput streamInput = out.bytes().streamInput(); return (new BulkRequest(streamInput)); } + + private static IndexMetadata.Builder indexMetadata(String index) { + return IndexMetadata.builder(index).settings(settings(IndexVersion.current())).numberOfShards(1).numberOfReplicas(1); + } } diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java index 7f15fd0ec2582..b3d3ebe5e1357 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java @@ -29,7 +29,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.IndexingPressure; import org.elasticsearch.indices.EmptySystemIndices; @@ -50,7 +49,6 @@ import java.nio.charset.StandardCharsets; import java.util.Collections; import java.util.HashSet; -import java.util.Map; import java.util.concurrent.Executor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; @@ -143,11 +141,10 @@ void executeBulk( long startTimeNanos, ActionListener listener, Executor executor, - AtomicArray responses, - Map indicesThatCannotBeCreated + AtomicArray responses ) { expected.set(1000000); - super.executeBulk(task, bulkRequest, startTimeNanos, listener, executor, responses, indicesThatCannotBeCreated); + super.executeBulk(task, bulkRequest, startTimeNanos, listener, executor, responses); } }; } else { @@ -168,12 +165,11 @@ void executeBulk( long startTimeNanos, ActionListener listener, Executor executor, - AtomicArray responses, - Map indicesThatCannotBeCreated + AtomicArray responses ) { long elapsed = spinForAtLeastOneMillisecond(); expected.set(elapsed); - super.executeBulk(task, bulkRequest, startTimeNanos, listener, executor, responses, indicesThatCannotBeCreated); + super.executeBulk(task, bulkRequest, startTimeNanos, listener, executor, responses); } }; } diff --git a/server/src/test/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesActionTests.java 
b/server/src/test/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesActionTests.java index 0c07896986022..3031f17cccc62 100644 --- a/server/src/test/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesActionTests.java @@ -87,9 +87,10 @@ protected void doWriteTo(StreamOutput out) throws IOException { null ); - IllegalArgumentException ex = asInstanceOf( + IllegalArgumentException ex = safeAwaitFailure( IllegalArgumentException.class, - safeAwaitFailure(FieldCapabilitiesResponse.class, l -> action.doExecute(null, fieldCapsRequest, l)) + FieldCapabilitiesResponse.class, + l -> action.doExecute(null, fieldCapsRequest, l) ); assertThat( diff --git a/server/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java b/server/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java index 8211fc8dfa4c6..f2bc561792991 100644 --- a/server/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java @@ -471,52 +471,6 @@ public void testWritingExpandWildcards() throws IOException { ); } - public void testEmptyFirstLine1() throws Exception { - MultiSearchRequest request = parseMultiSearchRequestFromString(""" - - - { "query": {"match_all": {}}} - {} - { "query": {"match_all": {}}} - - { "query": {"match_all": {}}} - {} - { "query": {"match_all": {}}} - """, RestApiVersion.V_7); - assertThat(request.requests().size(), equalTo(4)); - for (SearchRequest searchRequest : request.requests()) { - assertThat(searchRequest.indices().length, equalTo(0)); - assertThat(searchRequest.source().query(), instanceOf(MatchAllQueryBuilder.class)); - } - assertCriticalWarnings( - "support for empty first line before any action metadata in msearch API is deprecated and will be removed " - + "in the next major version" - ); - } - - public void testEmptyFirstLine2() throws Exception { - MultiSearchRequest request = parseMultiSearchRequestFromString(""" - - {} - { "query": {"match_all": {}}} - - { "query": {"match_all": {}}} - {} - { "query": {"match_all": {}}} - - { "query": {"match_all": {}}} - """, RestApiVersion.V_7); - assertThat(request.requests().size(), equalTo(4)); - for (SearchRequest searchRequest : request.requests()) { - assertThat(searchRequest.indices().length, equalTo(0)); - assertThat(searchRequest.source().query(), instanceOf(MatchAllQueryBuilder.class)); - } - assertCriticalWarnings( - "support for empty first line before any action metadata in msearch API is deprecated and will be removed " - + "in the next major version" - ); - } - public void testTaskDescription() { MultiSearchRequest request = new MultiSearchRequest(); request.add(new SearchRequest().preference("abc")); diff --git a/server/src/test/java/org/elasticsearch/action/search/ShardSearchFailureTests.java b/server/src/test/java/org/elasticsearch/action/search/ShardSearchFailureTests.java index 9d40911059b87..87f5df26d2b5d 100644 --- a/server/src/test/java/org/elasticsearch/action/search/ShardSearchFailureTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/ShardSearchFailureTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.search.SearchShardTarget; import 
org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TransportVersionUtils; @@ -70,7 +71,7 @@ private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws ShardSearchFailure parsed; try (XContentParser parser = createParser(xContentType.xContent(), mutated)) { assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); - parsed = ShardSearchFailure.fromXContent(parser); + parsed = SearchResponseUtils.parseShardSearchFailure(parser); assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); assertNull(parser.nextToken()); } diff --git a/server/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java b/server/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java index cd568ab1571f5..e0cd8d8390c74 100644 --- a/server/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java @@ -344,10 +344,8 @@ public void testGlobalBlock() { assertEquals( "blocked by: [SERVICE_UNAVAILABLE/1/test-block];", - asInstanceOf( - ClusterBlockException.class, - safeAwaitFailure(Response.class, listener -> action.doExecute(null, request, listener)) - ).getMessage() + safeAwaitFailure(ClusterBlockException.class, Response.class, listener -> action.doExecute(null, request, listener)) + .getMessage() ); } @@ -362,10 +360,8 @@ public void testRequestBlock() { setState(clusterService, ClusterState.builder(clusterService.state()).blocks(block)); assertEquals( "index [" + TEST_INDEX + "] blocked by: [SERVICE_UNAVAILABLE/1/test-block];", - asInstanceOf( - ClusterBlockException.class, - safeAwaitFailure(Response.class, listener -> action.doExecute(null, request, listener)) - ).getMessage() + safeAwaitFailure(ClusterBlockException.class, Response.class, listener -> action.doExecute(null, request, listener)) + .getMessage() ); } diff --git a/server/src/test/java/org/elasticsearch/client/internal/ParentTaskAssigningClientTests.java b/server/src/test/java/org/elasticsearch/client/internal/ParentTaskAssigningClientTests.java index 143858625ca76..7736ae63724f6 100644 --- a/server/src/test/java/org/elasticsearch/client/internal/ParentTaskAssigningClientTests.java +++ b/server/src/test/java/org/elasticsearch/client/internal/ParentTaskAssigningClientTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.client.NoOpClient; import org.elasticsearch.transport.RemoteClusterService; +import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportResponse; import java.util.concurrent.Executor; @@ -83,6 +84,24 @@ public void assertSame(parentTaskId, request.getParentTask()); listener.onFailure(new UnsupportedOperationException("fake remote-cluster client")); } + + @Override + public void execute( + Transport.Connection connection, + RemoteClusterActionType action, + Request request, + ActionListener listener + ) { + execute(action, request, listener); + } + + @Override + public void getConnection( + Request request, + ActionListener listener + ) { + listener.onResponse(null); + } }; } }; @@ -95,15 +114,27 @@ public void ); assertEquals( "fake remote-cluster client", - asInstanceOf( + safeAwaitFailure( + UnsupportedOperationException.class, + ClusterStateResponse.class, + listener -> remoteClusterClient.execute( + ClusterStateAction.REMOTE_TYPE, + new 
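The recurring test-utility change in these hunks collapses the old two-step asInstanceOf(Expected.class, safeAwaitFailure(Response.class, l -> ...)) idiom into a single three-argument safeAwaitFailure(Expected.class, Response.class, l -> ...) overload. A sketch of what such a combined helper plausibly looks like; this is an assumed shape, not the actual ESTestCase implementation:

    import java.util.concurrent.CompletableFuture;
    import java.util.function.Consumer;

    final class AwaitFailureSketch {

        interface Listener<T> {
            void onResponse(T response);
            void onFailure(Exception e);
        }

        // Run the action, wait for it to fail, and check the failure type in one call.
        static <E extends Exception, T> E safeAwaitFailure(
            Class<E> expectedException,
            Class<T> responseType, // present only to pin the listener's generic type
            Consumer<Listener<T>> action
        ) throws Exception {
            CompletableFuture<Exception> failure = new CompletableFuture<>();
            action.accept(new Listener<T>() {
                @Override
                public void onResponse(T response) {
                    failure.completeExceptionally(new AssertionError("expected a failure but got a response"));
                }

                @Override
                public void onFailure(Exception e) {
                    failure.complete(e);
                }
            });
            Exception e = failure.get(); // the real helper bounds this wait with a timeout
            if (expectedException.isInstance(e) == false) {
                throw new AssertionError("expected " + expectedException.getName() + " but got " + e);
            }
            return expectedException.cast(e);
        }
    }

Folding the cast into the helper keeps each call site to one expression and makes the expected exception type part of the wait itself.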
ClusterStateRequest(TEST_REQUEST_TIMEOUT), + listener + ) + ).getMessage() + ); + + assertEquals( + "fake remote-cluster client", + safeAwaitFailure( UnsupportedOperationException.class, - safeAwaitFailure( - ClusterStateResponse.class, - listener -> remoteClusterClient.execute( - ClusterStateAction.REMOTE_TYPE, - new ClusterStateRequest(TEST_REQUEST_TIMEOUT), - listener - ) + ClusterStateResponse.class, + listener -> remoteClusterClient.execute( + null, + ClusterStateAction.REMOTE_TYPE, + new ClusterStateRequest(TEST_REQUEST_TIMEOUT), + listener ) ).getMessage() ); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java index 76573049c6962..8cb7867cff436 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java @@ -198,7 +198,7 @@ protected DataStream mutateInstance(DataStream instance) { public void testRollover() { DataStream ds = DataStreamTestHelper.randomInstance().promoteDataStream(); Tuple newCoordinates = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA, ds.getBackingIndices()); - final DataStream rolledDs = ds.rollover(new Index(newCoordinates.v1(), UUIDs.randomBase64UUID()), newCoordinates.v2(), false, null); + final DataStream rolledDs = ds.rollover(new Index(newCoordinates.v1(), UUIDs.randomBase64UUID()), newCoordinates.v2(), null, null); assertThat(rolledDs.getName(), equalTo(ds.getName())); assertThat(rolledDs.getGeneration(), equalTo(ds.getGeneration() + 1)); assertThat(rolledDs.getIndices().size(), equalTo(ds.getIndices().size() + 1)); @@ -225,7 +225,7 @@ public void testRolloverWithConflictingBackingIndexName() { } final Tuple newCoordinates = ds.nextWriteIndexAndGeneration(builder.build(), ds.getBackingIndices()); - final DataStream rolledDs = ds.rollover(new Index(newCoordinates.v1(), UUIDs.randomBase64UUID()), newCoordinates.v2(), false, null); + final DataStream rolledDs = ds.rollover(new Index(newCoordinates.v1(), UUIDs.randomBase64UUID()), newCoordinates.v2(), null, null); assertThat(rolledDs.getName(), equalTo(ds.getName())); assertThat(rolledDs.getGeneration(), equalTo(ds.getGeneration() + numConflictingIndices + 1)); assertThat(rolledDs.getIndices().size(), equalTo(ds.getIndices().size() + 1)); @@ -242,7 +242,12 @@ public void testRolloverUpgradeToTsdbDataStream() { .build(); var newCoordinates = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA, ds.getBackingIndices()); - var rolledDs = ds.rollover(new Index(newCoordinates.v1(), UUIDs.randomBase64UUID()), newCoordinates.v2(), true, null); + var rolledDs = ds.rollover( + new Index(newCoordinates.v1(), UUIDs.randomBase64UUID()), + newCoordinates.v2(), + IndexMode.TIME_SERIES, + null + ); assertThat(rolledDs.getName(), equalTo(ds.getName())); assertThat(rolledDs.getGeneration(), equalTo(ds.getGeneration() + 1)); assertThat(rolledDs.getIndices().size(), equalTo(ds.getIndices().size() + 1)); @@ -251,11 +256,41 @@ public void testRolloverUpgradeToTsdbDataStream() { assertThat(rolledDs.getIndexMode(), equalTo(IndexMode.TIME_SERIES)); } - public void testRolloverDowngradeToRegularDataStream() { + public void testRolloverUpgradeToLogsdbDataStream() { + DataStream ds = DataStreamTestHelper.randomInstance() + .copy() + .setReplicated(false) + .setIndexMode(randomBoolean() ? 
IndexMode.STANDARD : null) + .build(); + var newCoordinates = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA, ds.getBackingIndices()); + + var rolledDs = ds.rollover(new Index(newCoordinates.v1(), UUIDs.randomBase64UUID()), newCoordinates.v2(), IndexMode.LOGSDB, null); + assertThat(rolledDs.getName(), equalTo(ds.getName())); + assertThat(rolledDs.getGeneration(), equalTo(ds.getGeneration() + 1)); + assertThat(rolledDs.getIndices().size(), equalTo(ds.getIndices().size() + 1)); + assertTrue(rolledDs.getIndices().containsAll(ds.getIndices())); + assertTrue(rolledDs.getIndices().contains(rolledDs.getWriteIndex())); + assertThat(rolledDs.getIndexMode(), equalTo(IndexMode.LOGSDB)); + } + + public void testRolloverDowngradeFromTsdbToRegularDataStream() { DataStream ds = DataStreamTestHelper.randomInstance().copy().setReplicated(false).setIndexMode(IndexMode.TIME_SERIES).build(); var newCoordinates = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA, ds.getBackingIndices()); - var rolledDs = ds.rollover(new Index(newCoordinates.v1(), UUIDs.randomBase64UUID()), newCoordinates.v2(), false, null); + var rolledDs = ds.rollover(new Index(newCoordinates.v1(), UUIDs.randomBase64UUID()), newCoordinates.v2(), null, null); + assertThat(rolledDs.getName(), equalTo(ds.getName())); + assertThat(rolledDs.getGeneration(), equalTo(ds.getGeneration() + 1)); + assertThat(rolledDs.getIndices().size(), equalTo(ds.getIndices().size() + 1)); + assertTrue(rolledDs.getIndices().containsAll(ds.getIndices())); + assertTrue(rolledDs.getIndices().contains(rolledDs.getWriteIndex())); + assertThat(rolledDs.getIndexMode(), nullValue()); + } + + public void testRolloverDowngradeFromLogsdbToRegularDataStream() { + DataStream ds = DataStreamTestHelper.randomInstance().copy().setReplicated(false).setIndexMode(IndexMode.LOGSDB).build(); + var newCoordinates = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA, ds.getBackingIndices()); + + var rolledDs = ds.rollover(new Index(newCoordinates.v1(), UUIDs.randomBase64UUID()), newCoordinates.v2(), null, null); assertThat(rolledDs.getName(), equalTo(ds.getName())); assertThat(rolledDs.getGeneration(), equalTo(ds.getGeneration() + 1)); assertThat(rolledDs.getIndices().size(), equalTo(ds.getIndices().size() + 1)); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamServiceTests.java index bbcf1ca33a0c2..6c76abf7cebe3 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamServiceTests.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.Maps; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.indices.ExecutorNames; @@ -77,6 +78,43 @@ public void testCreateDataStream() throws Exception { assertThat(newState.metadata().dataStreams().get(dataStreamName).isHidden(), is(false)); assertThat(newState.metadata().dataStreams().get(dataStreamName).isReplicated(), is(false)); assertThat(newState.metadata().dataStreams().get(dataStreamName).getLifecycle(), equalTo(DataStreamLifecycle.DEFAULT)); + assertThat(newState.metadata().dataStreams().get(dataStreamName).getIndexMode(), nullValue()); + 
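In the DataStreamTests hunks around this point, DataStream.rollover no longer takes a boolean time-series flag: the third parameter is now a nullable IndexMode, so a single argument expresses upgrading to TSDB, upgrading to LOGSDB, or downgrading to a standard stream (null). The MetadataCreateDataStreamService test below likewise asserts that index.mode: logsdb in the template surfaces as IndexMode.LOGSDB on the created data stream. A toy restatement of the transition the tests assert, with local stand-in types:

    // Hedged sketch of the mode-transition semantics, not the production class.
    final class RolloverModeSketch {

        enum IndexMode { STANDARD, TIME_SERIES, LOGSDB }

        record DataStream(String name, long generation, IndexMode indexMode) {

            // The mode passed at rollover time becomes the stream's mode;
            // null means a regular (neither TSDB nor LOGSDB) data stream.
            DataStream rollover(IndexMode newMode) {
                return new DataStream(name, generation + 1, newMode);
            }
        }

        public static void main(String[] args) {
            DataStream tsdb = new DataStream("metrics", 1, IndexMode.TIME_SERIES);
            assert tsdb.rollover(null).indexMode() == null;                         // downgrade
            assert tsdb.rollover(IndexMode.LOGSDB).indexMode() == IndexMode.LOGSDB; // upgrade
        }
    }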
assertThat(newState.metadata().index(DataStream.getDefaultBackingIndexName(dataStreamName, 1)), notNullValue()); + assertThat( + newState.metadata().index(DataStream.getDefaultBackingIndexName(dataStreamName, 1)).getSettings().get("index.hidden"), + equalTo("true") + ); + assertThat(newState.metadata().index(DataStream.getDefaultBackingIndexName(dataStreamName, 1)).isSystem(), is(false)); + } + + public void testCreateDataStreamLogsdb() throws Exception { + final MetadataCreateIndexService metadataCreateIndexService = getMetadataCreateIndexService(); + final String dataStreamName = "my-data-stream"; + ComposableIndexTemplate template = ComposableIndexTemplate.builder() + .indexPatterns(List.of(dataStreamName + "*")) + .template(new Template(Settings.builder().put("index.mode", "logsdb").build(), null, null)) + .dataStreamTemplate(new DataStreamTemplate()) + .build(); + ClusterState cs = ClusterState.builder(new ClusterName("_name")) + .metadata(Metadata.builder().put("template", template).build()) + .build(); + CreateDataStreamClusterStateUpdateRequest req = new CreateDataStreamClusterStateUpdateRequest(dataStreamName); + ClusterState newState = MetadataCreateDataStreamService.createDataStream( + metadataCreateIndexService, + Settings.EMPTY, + cs, + true, + req, + ActionListener.noop(), + false + ); + assertThat(newState.metadata().dataStreams().size(), equalTo(1)); + assertThat(newState.metadata().dataStreams().get(dataStreamName).getName(), equalTo(dataStreamName)); + assertThat(newState.metadata().dataStreams().get(dataStreamName).isSystem(), is(false)); + assertThat(newState.metadata().dataStreams().get(dataStreamName).isHidden(), is(false)); + assertThat(newState.metadata().dataStreams().get(dataStreamName).isReplicated(), is(false)); + assertThat(newState.metadata().dataStreams().get(dataStreamName).getIndexMode(), equalTo(IndexMode.LOGSDB)); + assertThat(newState.metadata().dataStreams().get(dataStreamName).getLifecycle(), equalTo(DataStreamLifecycle.DEFAULT)); assertThat(newState.metadata().index(DataStream.getDefaultBackingIndexName(dataStreamName, 1)), notNullValue()); assertThat( newState.metadata().index(DataStream.getDefaultBackingIndexName(dataStreamName, 1)).getSettings().get("index.hidden"), diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerTests.java index 1f80160c92ffd..1ae73c9c08137 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerTests.java @@ -1258,8 +1258,8 @@ public void testShouldLogOnTooManyUndesiredAllocations() { final int shardCount = 5; - final var dataNode1Assignments = Maps.newMapWithExpectedSize(shardCount); - final var dataNode2Assignments = Maps.newMapWithExpectedSize(shardCount); + final var allShardsDesiredOnDataNode1 = Maps.newMapWithExpectedSize(shardCount); + final var allShardsDesiredOnDataNode2 = Maps.newMapWithExpectedSize(shardCount); final var metadataBuilder = Metadata.builder(); final var routingTableBuilder = RoutingTable.builder(); @@ -1270,10 +1270,23 @@ public void testShouldLogOnTooManyUndesiredAllocations() { metadataBuilder.put(indexMetadata, false); routingTableBuilder.add(IndexRoutingTable.builder(index).addShard(newShardRouting(shardId, "data-node-1", true, STARTED))); - 
dataNode1Assignments.put(shardId, new ShardAssignment(Set.of("data-node-1"), 1, 0, 0)); - dataNode2Assignments.put(shardId, new ShardAssignment(Set.of("data-node-2"), 1, 0, 0)); + allShardsDesiredOnDataNode1.put(shardId, new ShardAssignment(Set.of("data-node-1"), 1, 0, 0)); + allShardsDesiredOnDataNode2.put(shardId, new ShardAssignment(Set.of("data-node-2"), 1, 0, 0)); } + final var node1ShuttingDown = randomBoolean(); + if (node1ShuttingDown) { + var type = randomFrom(SingleNodeShutdownMetadata.Type.SIGTERM, SingleNodeShutdownMetadata.Type.REMOVE); + var builder = SingleNodeShutdownMetadata.builder() + .setType(type) + .setNodeId("data-node-1") + .setStartedAtMillis(randomNonNegativeLong()) + .setReason("test"); + if (type.equals(SingleNodeShutdownMetadata.Type.SIGTERM)) { + builder.setGracePeriod(randomPositiveTimeValue()); + } + metadataBuilder.putCustom(NodesShutdownMetadata.TYPE, new NodesShutdownMetadata(Map.of("data-node-1", builder.build()))); + } final var clusterState = ClusterState.builder(ClusterName.DEFAULT) .nodes(DiscoveryNodes.builder().add(newNode("data-node-1")).add(newNode("data-node-2"))) .metadata(metadataBuilder) @@ -1293,8 +1306,9 @@ public void testShouldLogOnTooManyUndesiredAllocations() { + "/" + shardCount + ") are not on their desired nodes, which exceeds the warn threshold of [10%]"; + // Desired assignment matches current routing table assertThatLogger( - () -> reconciler.reconcile(new DesiredBalance(1, dataNode1Assignments), createRoutingAllocationFrom(clusterState)), + () -> reconciler.reconcile(new DesiredBalance(1, allShardsDesiredOnDataNode1), createRoutingAllocationFrom(clusterState)), DesiredBalanceReconciler.class, new MockLog.UnseenEventExpectation( "Should not log if all shards on desired location", @@ -1304,17 +1318,24 @@ public void testShouldLogOnTooManyUndesiredAllocations() { ) ); assertThatLogger( - () -> reconciler.reconcile(new DesiredBalance(1, dataNode2Assignments), createRoutingAllocationFrom(clusterState)), + () -> reconciler.reconcile(new DesiredBalance(1, allShardsDesiredOnDataNode2), createRoutingAllocationFrom(clusterState)), DesiredBalanceReconciler.class, - new MockLog.SeenEventExpectation( - "Should log first too many shards on undesired locations", - DesiredBalanceReconciler.class.getCanonicalName(), - Level.WARN, - expectedWarningMessage - ) + node1ShuttingDown + ? 
new MockLog.UnseenEventExpectation( + "Should not log first too many shards on undesired locations", + DesiredBalanceReconciler.class.getCanonicalName(), + Level.WARN, + expectedWarningMessage + ) + : new MockLog.SeenEventExpectation( + "Should log first too many shards on undesired locations", + DesiredBalanceReconciler.class.getCanonicalName(), + Level.WARN, + expectedWarningMessage + ) ); assertThatLogger( - () -> reconciler.reconcile(new DesiredBalance(1, dataNode2Assignments), createRoutingAllocationFrom(clusterState)), + () -> reconciler.reconcile(new DesiredBalance(1, allShardsDesiredOnDataNode2), createRoutingAllocationFrom(clusterState)), DesiredBalanceReconciler.class, new MockLog.UnseenEventExpectation( "Should not log immediate second too many shards on undesired locations", diff --git a/server/src/test/java/org/elasticsearch/cluster/service/ClusterApplierServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/service/ClusterApplierServiceTests.java index e37e92d553dcc..7c1c954e7b4e9 100644 --- a/server/src/test/java/org/elasticsearch/cluster/service/ClusterApplierServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/service/ClusterApplierServiceTests.java @@ -181,7 +181,7 @@ public void onFailure(Exception e) { fail(); } }); - assertBusy(mockLog::assertAllExpectationsMatched); + mockLog.awaitAllExpectationsMatched(); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java index 498c04c005304..a540649582177 100644 --- a/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java @@ -526,7 +526,7 @@ public void onFailure(Exception e) { fail(); } }); - assertBusy(mockLog::assertAllExpectationsMatched); + mockLog.awaitAllExpectationsMatched(); } } diff --git a/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java b/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java index 981eae9d60694..77133516f37d5 100644 --- a/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java +++ b/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java @@ -420,15 +420,15 @@ protected void populatePerRequestThreadContext(RestRequest restRequest, ThreadCo } public void testHandlingCompatibleVersionParsingErrors() { - // a compatible version exception (v7 on accept and v8 on content-type) should be handled gracefully + // a compatible version exception (v8 on accept and v9 on content-type) should be handled gracefully final ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); try ( AbstractHttpServerTransport transport = failureAssertingtHttpServerTransport(clusterSettings, Set.of("Accept", "Content-Type")) ) { Map> headers = new HashMap<>(); - headers.put("Accept", Collections.singletonList("aaa/bbb;compatible-with=7")); - headers.put("Content-Type", Collections.singletonList("aaa/bbb;compatible-with=8")); + headers.put("Accept", Collections.singletonList("aaa/bbb;compatible-with=8")); + headers.put("Content-Type", Collections.singletonList("aaa/bbb;compatible-with=9")); FakeRestRequest.FakeHttpRequest fakeHttpRequest = new FakeRestRequest.FakeHttpRequest( RestRequest.Method.GET, diff --git a/server/src/test/java/org/elasticsearch/index/analysis/CustomNormalizerTests.java 
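Stepping back to the reconciler change a few hunks up: the test now randomly marks data-node-1 as shutting down (REMOVE or SIGTERM, the latter requiring a grace period) and flips the log expectation accordingly, so the "shards not on their desired nodes" warning is expected only when the node is not shutting down. The rule the test pins down, sketched with assumed names:

    import java.util.Set;

    // Illustrative predicate only; the real reconciler consults cluster metadata.
    final class UndesiredAllocationLogSketch {

        static boolean shouldWarn(int undesired, int total, double warnThreshold, Set<String> shuttingDown, String node) {
            if (shuttingDown.contains(node)) {
                return false; // shards will move off this node anyway, so a warning is noise
            }
            return total > 0 && (double) undesired / total > warnThreshold;
        }

        public static void main(String[] args) {
            // 5 of 5 shards on undesired nodes against a 10% threshold
            assert shouldWarn(5, 5, 0.10, Set.of(), "data-node-1");
            assert shouldWarn(5, 5, 0.10, Set.of("data-node-1"), "data-node-1") == false;
        }
    }

The same stretch also swaps assertBusy(mockLog::assertAllExpectationsMatched) for the dedicated mockLog.awaitAllExpectationsMatched(), and moves the HTTP compatible-version fixtures from v7/v8 to v8/v9 headers.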
b/server/src/test/java/org/elasticsearch/index/analysis/CustomNormalizerTests.java index d2aa11f9f3866..13c58fed1c5ad 100644 --- a/server/src/test/java/org/elasticsearch/index/analysis/CustomNormalizerTests.java +++ b/server/src/test/java/org/elasticsearch/index/analysis/CustomNormalizerTests.java @@ -27,6 +27,7 @@ import static java.util.Collections.singletonList; import static java.util.Collections.singletonMap; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertTokenStreamContents; public class CustomNormalizerTests extends ESTokenStreamTestCase { private static final AnalysisPlugin MOCK_ANALYSIS_PLUGIN = new MockAnalysisPlugin(); diff --git a/server/src/test/java/org/elasticsearch/index/analysis/ShingleTokenFilterFactoryTests.java b/server/src/test/java/org/elasticsearch/index/analysis/ShingleTokenFilterFactoryTests.java index 3cfdbdcdf37da..0aa7652e5a5f6 100644 --- a/server/src/test/java/org/elasticsearch/index/analysis/ShingleTokenFilterFactoryTests.java +++ b/server/src/test/java/org/elasticsearch/index/analysis/ShingleTokenFilterFactoryTests.java @@ -24,6 +24,7 @@ import java.io.IOException; import java.io.StringReader; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertTokenStreamContents; import static org.hamcrest.Matchers.instanceOf; @ThreadLeakScope(Scope.NONE) diff --git a/server/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java b/server/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java index b02e05ae704ef..fc0e6cfab7a37 100644 --- a/server/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java +++ b/server/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java @@ -29,11 +29,13 @@ public class StopTokenFilterTests extends ESTokenStreamTestCase { public void testPositionIncrementSetting() throws IOException { + boolean versionSet = false; Builder builder = Settings.builder() .put("index.analysis.filter.my_stop.type", "stop") .put("index.analysis.filter.my_stop.enable_position_increments", false); if (random().nextBoolean()) { builder.put("index.analysis.filter.my_stop.version", "5.0"); + versionSet = true; } builder.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()); Settings settings = builder.build(); @@ -43,14 +45,17 @@ public void testPositionIncrementSetting() throws IOException { } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("enable_position_increments is not supported anymore")); } + if (versionSet) { + assertWarnings("Setting [version] on analysis component [my_stop] has no effect and is deprecated"); + } } public void testCorrectPositionIncrementSetting() throws IOException { Builder builder = Settings.builder().put("index.analysis.filter.my_stop.type", "stop"); + boolean versionSet = false; if (random().nextBoolean()) { builder.put("index.analysis.filter.my_stop.version", Version.LATEST); - } else { - // don't specify + versionSet = true; } builder.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()); ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(builder.build()); @@ -60,6 +65,9 @@ public void testCorrectPositionIncrementSetting() throws IOException { tokenizer.setReader(new StringReader("foo bar")); TokenStream create = tokenFilter.create(tokenizer); assertThat(create, instanceOf(StopFilter.class)); + if (versionSet) { + assertWarnings("Setting [version] on analysis component [my_stop] has no effect and is deprecated"); 
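The StopTokenFilterTests hunks (closed by the brace just below) track whether the randomized builder actually applied the version setting, because setting version on an analysis component is now a no-op that emits a deprecation warning the test must consume. The track-then-assert bookkeeping in isolation, with a stand-in warning sink:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Random;

    // Illustrative only: "warnings" is a plain list standing in for the
    // deprecation-logger channel that assertWarnings drains in ESTestCase.
    final class DeprecatedSettingSketch {

        static final List<String> warnings = new ArrayList<>();

        static void putVersionSetting() {
            warnings.add("Setting [version] on analysis component [my_stop] has no effect and is deprecated");
        }

        public static void main(String[] args) {
            boolean versionSet = false;
            if (new Random().nextBoolean()) {
                putVersionSetting(); // the randomized branch in the test
                versionSet = true;
            }
            // ... build and exercise the analyzer ...
            if (versionSet) {
                assert warnings.size() == 1; // assertWarnings(...) in the real test
            } else {
                assert warnings.isEmpty(); // no warning may be left unconsumed
            }
        }
    }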
+ } } public void testThatSuggestStopFilterWorks() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index d50bea693cb6e..883723de31d46 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -3206,9 +3206,10 @@ public void testCurrentTranslogUUIIDIsCommitted() throws IOException { engine.syncTranslog(); // to advance persisted local checkpoint assertEquals(engine.getProcessedLocalCheckpoint(), engine.getPersistedLocalCheckpoint()); globalCheckpoint.set(engine.getPersistedLocalCheckpoint()); - asInstanceOf( + safeAwaitFailure( IllegalStateException.class, - safeAwaitFailure(Void.class, listener -> engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE, listener)) + Void.class, + listener -> engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE, listener) ); Map userData = engine.getLastCommittedSegmentInfos().getUserData(); assertEquals(engine.getTranslog().getTranslogUUID(), userData.get(Translog.TRANSLOG_UUID_KEY)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java index 71b52dc41705b..93f546eb288b9 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java @@ -2307,6 +2307,60 @@ public void testSubobjectsFalseFlattened() throws Exception { assertNotNull(doc.rootDoc().getField("attributes.simple.attribute")); } + public void testSubobjectsAutoFlattened() throws Exception { + DocumentMapper mapper = createDocumentMapper(mapping(b -> { + b.startObject("attributes"); + { + b.field("dynamic", false); + b.field("subobjects", "auto"); + b.startObject("properties"); + { + b.startObject("simple.attribute").field("type", "keyword").endObject(); + b.startObject("complex.attribute").field("type", "flattened").endObject(); + b.startObject("path").field("type", "object"); + { + b.field("store_array_source", "true").field("subobjects", "auto"); + b.startObject("properties"); + { + b.startObject("nested.attribute").field("type", "keyword").endObject(); + } + b.endObject(); + } + b.endObject(); + b.startObject("flattened_object").field("type", "object"); + { + b.startObject("properties"); + { + b.startObject("nested.attribute").field("type", "keyword").endObject(); + } + b.endObject(); + } + b.endObject(); + } + b.endObject(); + } + b.endObject(); + })); + ParsedDocument doc = mapper.parse(source(""" + { + "attributes": { + "complex.attribute": { + "foo" : "bar" + }, + "simple.attribute": "sa", + "path": { + "nested.attribute": "na" + }, + "flattened_object.nested.attribute": "fna" + } + } + """)); + assertNotNull(doc.rootDoc().getField("attributes.complex.attribute")); + assertNotNull(doc.rootDoc().getField("attributes.simple.attribute")); + assertNotNull(doc.rootDoc().getField("attributes.path.nested.attribute")); + assertNotNull(doc.rootDoc().getField("attributes.flattened_object.nested.attribute")); + } + public void testWriteToFieldAlias() throws Exception { DocumentMapper mapper = createDocumentMapper(mapping(b -> { b.startObject("alias-field"); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DotExpandingXContentParserTests.java 
b/server/src/test/java/org/elasticsearch/index/mapper/DotExpandingXContentParserTests.java index b38c65c1710d6..c4e223a4d1b77 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DotExpandingXContentParserTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DotExpandingXContentParserTests.java @@ -13,9 +13,12 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; +import org.hamcrest.Matchers; import java.io.IOException; +import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -26,7 +29,7 @@ private void assertXContentMatches(String dotsExpanded, String withDots) throws final ContentPath contentPath = new ContentPath(); try ( XContentParser inputParser = createParser(JsonXContent.jsonXContent, withDots); - XContentParser expandedParser = DotExpandingXContentParser.expandDots(inputParser, contentPath) + XContentParser expandedParser = DotExpandingXContentParser.expandDots(inputParser, contentPath, null) ) { expandedParser.allowDuplicateKeys(true); @@ -37,7 +40,7 @@ private void assertXContentMatches(String dotsExpanded, String withDots) throws expectedParser.allowDuplicateKeys(true); try ( var p = createParser(JsonXContent.jsonXContent, withDots); - XContentParser actualParser = DotExpandingXContentParser.expandDots(p, contentPath) + XContentParser actualParser = DotExpandingXContentParser.expandDots(p, contentPath, null) ) { XContentParser.Token currentToken; while ((currentToken = actualParser.nextToken()) != null) { @@ -127,7 +130,7 @@ public void testDuplicateKeys() throws IOException { public void testDotsCollapsingFlatPaths() throws IOException { ContentPath contentPath = new ContentPath(); XContentParser parser = DotExpandingXContentParser.expandDots(createParser(JsonXContent.jsonXContent, """ - {"metrics.service.time": 10, "metrics.service.time.max": 500, "metrics.foo": "value"}"""), contentPath); + {"metrics.service.time": 10, "metrics.service.time.max": 500, "metrics.foo": "value"}"""), contentPath, null); parser.nextToken(); assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); assertEquals("metrics", parser.currentName()); @@ -197,7 +200,7 @@ public void testDotsCollapsingStructuredPath() throws IOException { }, "foo" : "value" } - }"""), contentPath); + }"""), contentPath, null); parser.nextToken(); assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); assertEquals("metrics", parser.currentName()); @@ -235,7 +238,7 @@ public void testDotsCollapsingStructuredPath() throws IOException { public void testSkipChildren() throws IOException { XContentParser parser = DotExpandingXContentParser.expandDots(createParser(JsonXContent.jsonXContent, """ - { "test.with.dots" : "value", "nodots" : "value2" }"""), new ContentPath()); + { "test.with.dots" : "value", "nodots" : "value2" }"""), new ContentPath(), null); parser.nextToken(); // start object assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); assertEquals("test", parser.currentName()); @@ -258,7 +261,7 @@ public void testSkipChildren() throws IOException { public void testSkipChildrenWithinInnerObject() throws IOException { XContentParser parser = DotExpandingXContentParser.expandDots(createParser(JsonXContent.jsonXContent, """ - { "test.with.dots" : {"obj" : {"field":"value"}}, "nodots" : "value2" }"""), new ContentPath()); + { 
"test.with.dots" : {"obj" : {"field":"value"}}, "nodots" : "value2" }"""), new ContentPath(), null); parser.nextToken(); // start object assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); @@ -306,7 +309,8 @@ public void testGetTokenLocation() throws IOException { XContentParser expectedParser = createParser(JsonXContent.jsonXContent, jsonInput); XContentParser dotExpandedParser = DotExpandingXContentParser.expandDots( createParser(JsonXContent.jsonXContent, jsonInput), - new ContentPath() + new ContentPath(), + null ); assertEquals(expectedParser.getTokenLocation(), dotExpandedParser.getTokenLocation()); @@ -364,7 +368,8 @@ public void testGetTokenLocation() throws IOException { public void testParseMapUOE() throws Exception { XContentParser dotExpandedParser = DotExpandingXContentParser.expandDots( createParser(JsonXContent.jsonXContent, ""), - new ContentPath() + new ContentPath(), + null ); expectThrows(UnsupportedOperationException.class, dotExpandedParser::map); } @@ -372,7 +377,8 @@ public void testParseMapUOE() throws Exception { public void testParseMapOrderedUOE() throws Exception { XContentParser dotExpandedParser = DotExpandingXContentParser.expandDots( createParser(JsonXContent.jsonXContent, ""), - new ContentPath() + new ContentPath(), + null ); expectThrows(UnsupportedOperationException.class, dotExpandedParser::mapOrdered); } @@ -380,7 +386,8 @@ public void testParseMapOrderedUOE() throws Exception { public void testParseMapStringsUOE() throws Exception { XContentParser dotExpandedParser = DotExpandingXContentParser.expandDots( createParser(JsonXContent.jsonXContent, ""), - new ContentPath() + new ContentPath(), + null ); expectThrows(UnsupportedOperationException.class, dotExpandedParser::mapStrings); } @@ -388,7 +395,8 @@ public void testParseMapStringsUOE() throws Exception { public void testParseMapSupplierUOE() throws Exception { XContentParser dotExpandedParser = DotExpandingXContentParser.expandDots( createParser(JsonXContent.jsonXContent, ""), - new ContentPath() + new ContentPath(), + null ); expectThrows(UnsupportedOperationException.class, () -> dotExpandedParser.map(HashMap::new, XContentParser::text)); } @@ -403,7 +411,8 @@ public void testParseMap() throws Exception { contentPath.setWithinLeafObject(true); XContentParser dotExpandedParser = DotExpandingXContentParser.expandDots( createParser(JsonXContent.jsonXContent, jsonInput), - contentPath + contentPath, + null ); assertEquals(XContentParser.Token.START_OBJECT, dotExpandedParser.nextToken()); assertEquals(XContentParser.Token.FIELD_NAME, dotExpandedParser.nextToken()); @@ -418,7 +427,8 @@ public void testParseMap() throws Exception { public void testParseListUOE() throws Exception { XContentParser dotExpandedParser = DotExpandingXContentParser.expandDots( createParser(JsonXContent.jsonXContent, ""), - new ContentPath() + new ContentPath(), + null ); expectThrows(UnsupportedOperationException.class, dotExpandedParser::list); } @@ -426,7 +436,8 @@ public void testParseListUOE() throws Exception { public void testParseListOrderedUOE() throws Exception { XContentParser dotExpandedParser = DotExpandingXContentParser.expandDots( createParser(JsonXContent.jsonXContent, ""), - new ContentPath() + new ContentPath(), + null ); expectThrows(UnsupportedOperationException.class, dotExpandedParser::listOrderedMap); } @@ -440,7 +451,8 @@ public void testParseList() throws Exception { contentPath.setWithinLeafObject(true); XContentParser dotExpandedParser = DotExpandingXContentParser.expandDots( 
createParser(JsonXContent.jsonXContent, jsonInput), - contentPath + contentPath, + null ); assertEquals(XContentParser.Token.START_OBJECT, dotExpandedParser.nextToken()); assertEquals(XContentParser.Token.FIELD_NAME, dotExpandedParser.nextToken()); @@ -450,4 +462,104 @@ public void testParseList() throws Exception { assertEquals("one", list.get(0)); assertEquals("two", list.get(1)); } + + private static DocumentParserContext createContext(XContentBuilder builder) throws IOException { + var documentMapper = new MapperServiceTestCase() { + }.createDocumentMapper(builder); + return new TestDocumentParserContext(documentMapper.mappers(), null); + } + + private static List getSubPaths(XContentBuilder builder, String... path) throws IOException { + DocumentParserContext context = createContext(builder); + return DotExpandingXContentParser.maybeFlattenPaths(Arrays.stream(path).toList(), context, new ContentPath()); + } + + private static List getSubPaths(XContentBuilder builder, List contentPath, List path) throws IOException { + DocumentParserContext context = createContext(builder); + ContentPath content = new ContentPath(); + for (String c : contentPath) { + content.add(c); + } + return DotExpandingXContentParser.maybeFlattenPaths(path, context, content); + } + + public void testAutoFlattening() throws Exception { + var b = XContentBuilder.builder(XContentType.JSON.xContent()); + b.startObject().startObject("_doc"); + { + b.field("subobjects", "auto"); + b.startObject("properties"); + { + b.startObject("path").startObject("properties"); + { + b.startObject("to").startObject("properties"); + { + b.startObject("field").field("type", "integer").endObject(); + } + b.endObject().endObject(); + } + b.endObject().endObject(); + b.startObject("path.auto").field("subobjects", "auto").startObject("properties"); + { + b.startObject("to").startObject("properties"); + { + b.startObject("some.field").field("type", "integer").endObject(); + } + b.endObject().endObject(); + b.startObject("inner.enabled").field("dynamic", "false").startObject("properties"); + { + b.startObject("field").field("type", "integer").endObject(); + } + b.endObject().endObject(); + } + b.endObject().endObject(); + b.startObject("path.disabled").field("subobjects", "false").startObject("properties"); + { + b.startObject("to").startObject("properties"); + { + b.startObject("some.field").field("type", "integer").endObject(); + } + b.endObject().endObject(); + } + b.endObject().endObject(); + } + b.endObject(); + } + b.endObject().endObject(); + + // inner [subobjects:enabled] gets flattened + assertThat(getSubPaths(b, "field"), Matchers.contains("field")); + assertThat(getSubPaths(b, "path", "field"), Matchers.contains("path.field")); + assertThat(getSubPaths(b, "path", "to", "field"), Matchers.contains("path.to.field")); + assertThat(getSubPaths(b, "path", "to", "any"), Matchers.contains("path.to.any")); + + // inner [subobjects:auto] does not get flattened + assertThat(getSubPaths(b, "path", "auto", "field"), Matchers.contains("path.auto", "field")); + assertThat(getSubPaths(b, "path", "auto", "some", "field"), Matchers.contains("path.auto", "some.field")); + assertThat(getSubPaths(b, "path", "auto", "to", "some", "field"), Matchers.contains("path.auto", "to.some.field")); + assertThat(getSubPaths(b, "path", "auto", "to", "some", "other"), Matchers.contains("path.auto", "to.some.other")); + assertThat(getSubPaths(b, "path", "auto", "inner", "enabled", "field"), Matchers.contains("path.auto", "inner.enabled", "field")); + assertThat( + 
getSubPaths(b, "path", "auto", "inner", "enabled", "to", "some", "field"), + Matchers.contains("path.auto", "inner.enabled", "to", "some", "field") + ); + + // inner [subobjects:disabled] gets flattened + assertThat(getSubPaths(b, "path", "disabled", "field"), Matchers.contains("path.disabled.field")); + assertThat(getSubPaths(b, "path", "disabled", "some", "field"), Matchers.contains("path.disabled.some.field")); + assertThat(getSubPaths(b, "path", "disabled", "to", "some", "field"), Matchers.contains("path.disabled.to.some.field")); + assertThat(getSubPaths(b, "path", "disabled", "to", "some", "other"), Matchers.contains("path.disabled.to.some.other")); + + // Non-empty content path. + assertThat(getSubPaths(b, List.of("path"), List.of("field")), Matchers.contains("field")); + assertThat(getSubPaths(b, List.of("path"), List.of("to", "field")), Matchers.contains("to", "field")); + assertThat(getSubPaths(b, List.of("path", "to"), List.of("field")), Matchers.contains("field")); + assertThat(getSubPaths(b, List.of("path"), List.of("auto", "field")), Matchers.contains("auto", "field")); + assertThat(getSubPaths(b, List.of("path", "auto"), List.of("to", "some", "field")), Matchers.contains("to.some.field")); + assertThat( + getSubPaths(b, List.of("path", "auto"), List.of("inner", "enabled", "to", "some", "field")), + Matchers.contains("inner.enabled", "to", "some", "field") + ); + assertThat(getSubPaths(b, List.of("path", "disabled"), List.of("to", "some", "field")), Matchers.contains("to", "some", "field")); + } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java index 7f430cf676809..43ee47245f492 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java @@ -1619,10 +1619,9 @@ public void testSubobjectsAutoWithInnerNestedFromDynamicTemplate() throws IOExce assertNotNull(doc.rootDoc().get("metrics.time.max")); assertNotNull(doc.docs().get(0).get("metrics.time.foo")); - assertThat( - ((ObjectMapper) doc.dynamicMappingsUpdate().getRoot().getMapper("metrics")).getMapper("time"), - instanceOf(NestedObjectMapper.class) - ); + var metrics = ((ObjectMapper) doc.dynamicMappingsUpdate().getRoot().getMapper("metrics")); + assertThat(metrics.getMapper("time"), instanceOf(NestedObjectMapper.class)); + assertThat(metrics.getMapper("time.max"), instanceOf(NumberFieldMapper.class)); } public void testDynamicSubobject() throws IOException { @@ -2057,7 +2056,7 @@ public void testSubobjectsAutoFlattened() throws IOException { "dynamic_templates": [ { "test": { - "path_match": "attributes.resource.*", + "path_match": "attributes.*", "match_mapping_type": "object", "mapping": { "type": "flattened" @@ -2070,7 +2069,7 @@ public void testSubobjectsAutoFlattened() throws IOException { """; String docJson = """ { - "attributes.resource": { + "attributes": { "complex.attribute": { "a": "b" }, @@ -2083,14 +2082,67 @@ public void testSubobjectsAutoFlattened() throws IOException { ParsedDocument parsedDoc = mapperService.documentMapper().parse(source(docJson)); merge(mapperService, dynamicMapping(parsedDoc.dynamicMappingsUpdate())); - Mapper fooBarMapper = mapperService.documentMapper().mappers().getMapper("attributes.resource.foo.bar"); + Mapper fooBarMapper = mapperService.documentMapper().mappers().getMapper("attributes.foo.bar"); assertNotNull(fooBarMapper); assertEquals("text", 
fooBarMapper.typeName()); - Mapper fooStructuredMapper = mapperService.documentMapper().mappers().getMapper("attributes.resource.complex.attribute"); + Mapper fooStructuredMapper = mapperService.documentMapper().mappers().getMapper("attributes.complex.attribute"); assertNotNull(fooStructuredMapper); assertEquals("flattened", fooStructuredMapper.typeName()); } + public void testSubobjectsAutoWithObjectInDynamicTemplate() throws IOException { + String mapping = """ + { + "_doc": { + "properties": { + "attributes": { + "type": "object", + "subobjects": "auto" + } + }, + "dynamic_templates": [ + { + "test": { + "path_match": "attributes.*", + "match_mapping_type": "object", + "mapping": { + "type": "object", + "dynamic": "false", + "properties": { + "id": { + "type": "integer" + } + } + } + } + } + ] + } + } + """; + String docJson = """ + { + "attributes": { + "to": { + "id": 10 + }, + "foo.bar": "baz" + } + } + """; + + MapperService mapperService = createMapperService(mapping); + ParsedDocument parsedDoc = mapperService.documentMapper().parse(source(docJson)); + merge(mapperService, dynamicMapping(parsedDoc.dynamicMappingsUpdate())); + + Mapper fooBarMapper = mapperService.documentMapper().mappers().getMapper("attributes.foo.bar"); + assertNotNull(fooBarMapper); + assertEquals("text", fooBarMapper.typeName()); + Mapper innerObject = mapperService.documentMapper().mappers().objectMappers().get("attributes.to"); + assertNotNull(innerObject); + assertEquals("integer", mapperService.documentMapper().mappers().getMapper("attributes.to.id").typeName()); + } + public void testMatchWithArrayOfFieldNames() throws IOException { String mapping = """ { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java index eaa7bf6528203..5d5273f0fc788 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java @@ -1549,6 +1549,66 @@ public void testCopyToLogicInsideObject() throws IOException { assertEquals("{\"path\":{\"at\":\"A\"}}", syntheticSource); } + public void testCopyToRootWithSubobjectFlattening() throws IOException { + DocumentMapper documentMapper = createMapperService(topMapping(b -> { + b.startObject("_source").field("mode", "synthetic").endObject(); + b.field("subobjects", randomFrom("false", "auto")); + b.startObject("properties"); + { + b.startObject("k").field("type", "keyword").field("copy_to", "a.b.c").endObject(); + b.startObject("a").startObject("properties"); + { + b.startObject("b").startObject("properties"); + { + b.startObject("c").field("type", "keyword").endObject(); + } + b.endObject().endObject(); + } + b.endObject().endObject(); + } + b.endObject(); + })).documentMapper(); + + CheckedConsumer document = b -> b.field("k", "hey"); + + var doc = documentMapper.parse(source(document)); + assertNotNull(doc.docs().get(0).getField("a.b.c")); + + var syntheticSource = syntheticSource(documentMapper, document); + assertEquals("{\"k\":\"hey\"}", syntheticSource); + } + + public void testCopyToObjectWithSubobjectFlattening() throws IOException { + DocumentMapper documentMapper = createMapperService(syntheticSourceMapping(b -> { + b.startObject("path").field("subobjects", randomFrom("false", "auto")).startObject("properties"); + { + b.startObject("k").field("type", "keyword").field("copy_to", "path.a.b.c").endObject(); + 
b.startObject("a").startObject("properties"); + { + b.startObject("b").startObject("properties"); + { + b.startObject("c").field("type", "keyword").endObject(); + } + b.endObject().endObject(); + } + b.endObject().endObject(); + } + b.endObject().endObject(); + })).documentMapper(); + + CheckedConsumer document = b -> { + b.startObject("path"); + b.field("k", "hey"); + b.endObject(); + }; + + var doc = documentMapper.parse(source(document)); + assertNotNull(doc.docs().get(0).getField("path.a.b.c")); + + var syntheticSource = syntheticSource(documentMapper, document); + assertEquals("{\"path\":{\"k\":\"hey\"}}", syntheticSource); + } + protected void validateRoundTripReader(String syntheticSource, DirectoryReader reader, DirectoryReader roundTripReader) throws IOException { // We exclude ignored source field since in some cases it contains an exact copy of a part of document source. diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java index 3312c94e8a0e1..4bc91b793d049 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java @@ -354,12 +354,8 @@ public void testSubobjectsFalse() throws Exception { b.field("subobjects", false); b.startObject("properties"); { - b.startObject("time"); - b.field("type", "long"); - b.endObject(); - b.startObject("time.max"); - b.field("type", "long"); - b.endObject(); + b.startObject("time").field("type", "long").endObject(); + b.startObject("time.max").field("type", "long").endObject(); } b.endObject(); } @@ -380,9 +376,7 @@ public void testSubobjectsFalseWithInnerObject() throws IOException { { b.startObject("properties"); { - b.startObject("max"); - b.field("type", "long"); - b.endObject(); + b.startObject("max").field("type", "long").endObject(); } b.endObject(); } @@ -403,9 +397,7 @@ public void testSubobjectsFalseWithInnerNested() { b.field("subobjects", false); b.startObject("properties"); { - b.startObject("time"); - b.field("type", "nested"); - b.endObject(); + b.startObject("time").field("type", "nested").endObject(); } b.endObject(); } @@ -419,12 +411,8 @@ public void testSubobjectsFalseWithInnerNested() { public void testSubobjectsFalseRoot() throws Exception { MapperService mapperService = createMapperService(mappingNoSubobjects(b -> { - b.startObject("metrics.service.time"); - b.field("type", "long"); - b.endObject(); - b.startObject("metrics.service.time.max"); - b.field("type", "long"); - b.endObject(); + b.startObject("metrics.service.time").field("type", "long").endObject(); + b.startObject("metrics.service.time.max").field("type", "long").endObject(); })); assertNotNull(mapperService.fieldType("metrics.service.time")); assertNotNull(mapperService.fieldType("metrics.service.time.max")); @@ -441,9 +429,7 @@ public void testSubobjectsFalseRootWithInnerObject() throws IOException { { b.startObject("properties"); { - b.startObject("max"); - b.field("type", "long"); - b.endObject(); + b.startObject("max").field("type", "long").endObject(); } b.endObject(); } @@ -455,9 +441,7 @@ public void testSubobjectsFalseRootWithInnerObject() throws IOException { public void testSubobjectsFalseRootWithInnerNested() { MapperParsingException exception = expectThrows(MapperParsingException.class, () -> createMapperService(mappingNoSubobjects(b -> { - b.startObject("metrics.service"); - b.field("type", "nested"); - b.endObject(); + 
b.startObject("metrics.service").field("type", "nested").endObject(); }))); assertEquals( "Failed to parse mapping: Tried to add nested object [metrics.service] to object [_doc] which does not support subobjects", @@ -473,8 +457,7 @@ public void testSubobjectsCannotBeUpdated() throws IOException { "_doc", MergeReason.MAPPING_UPDATE, new CompressedXContent(BytesReference.bytes(fieldMapping(b -> { - b.field("type", "object"); - b.field("subobjects", "false"); + b.field("type", "object").field("subobjects", "false"); }))) ); MapperException exception = expectThrows( @@ -509,12 +492,8 @@ public void testSubobjectsAuto() throws Exception { b.field("subobjects", "auto"); b.startObject("properties"); { - b.startObject("time"); - b.field("type", "long"); - b.endObject(); - b.startObject("time.max"); - b.field("type", "long"); - b.endObject(); + b.startObject("time").field("type", "long").endObject(); + b.startObject("time.max").field("type", "long").endObject(); b.startObject("attributes"); { b.field("type", "object"); @@ -531,7 +510,7 @@ public void testSubobjectsAuto() throws Exception { assertNotNull(mapperService.documentMapper().mappers().objectMappers().get("metrics.service.attributes")); } - public void testSubobjectsAutoWithInnerObject() throws IOException { + public void testSubobjectsAutoWithInnerFlattenableObject() throws IOException { MapperService mapperService = createMapperService(mapping(b -> { b.startObject("metrics.service"); { @@ -542,16 +521,12 @@ public void testSubobjectsAutoWithInnerObject() throws IOException { { b.startObject("properties"); { - b.startObject("max"); - b.field("type", "long"); - b.endObject(); + b.startObject("max").field("type", "long").endObject(); } b.endObject(); } b.endObject(); - b.startObject("foo"); - b.field("type", "keyword"); - b.endObject(); + b.startObject("foo").field("type", "keyword").endObject(); } b.endObject(); } @@ -560,11 +535,11 @@ public void testSubobjectsAutoWithInnerObject() throws IOException { assertNull(mapperService.fieldType("metrics.service.time")); assertNotNull(mapperService.fieldType("metrics.service.time.max")); assertNotNull(mapperService.fieldType("metrics.service.foo")); - assertNotNull(mapperService.documentMapper().mappers().objectMappers().get("metrics.service.time")); + assertNull(mapperService.documentMapper().mappers().objectMappers().get("metrics.service.time")); // Gets flattened. assertNotNull(mapperService.documentMapper().mappers().getMapper("metrics.service.foo")); } - public void testSubobjectsAutoWithInnerNested() throws IOException { + public void testSubobjectsAutoWithInnerNonFlattenableObject() throws IOException { MapperService mapperService = createMapperService(mapping(b -> { b.startObject("metrics.service"); { @@ -572,8 +547,36 @@ public void testSubobjectsAutoWithInnerNested() throws IOException { b.startObject("properties"); { b.startObject("time"); - b.field("type", "nested"); + { + b.field(ObjectMapper.STORE_ARRAY_SOURCE_PARAM, true); + b.startObject("properties"); + { + b.startObject("max").field("type", "long").endObject(); + } + b.endObject(); + } b.endObject(); + b.startObject("foo").field("type", "keyword").endObject(); + } + b.endObject(); + } + b.endObject(); + })); + assertNull(mapperService.fieldType("metrics.service.time")); + assertNotNull(mapperService.fieldType("metrics.service.time.max")); + assertNotNull(mapperService.fieldType("metrics.service.foo")); + assertNotNull(mapperService.documentMapper().mappers().objectMappers().get("metrics.service.time")); // Not flattened. 
+ assertNotNull(mapperService.documentMapper().mappers().getMapper("metrics.service.foo")); + } + + public void testSubobjectsAutoWithInnerNested() throws IOException { + MapperService mapperService = createMapperService(mapping(b -> { + b.startObject("metrics.service"); + { + b.field("subobjects", "auto"); + b.startObject("properties"); + { + b.startObject("time").field("type", "nested").endObject(); } b.endObject(); } @@ -587,12 +590,8 @@ public void testSubobjectsAutoWithInnerNested() throws IOException { public void testSubobjectsAutoRoot() throws Exception { MapperService mapperService = createMapperService(mappingWithSubobjects(b -> { - b.startObject("metrics.service.time"); - b.field("type", "long"); - b.endObject(); - b.startObject("metrics.service.time.max"); - b.field("type", "long"); - b.endObject(); + b.startObject("metrics.service.time").field("type", "long").endObject(); + b.startObject("metrics.service.time.max").field("type", "long").endObject(); b.startObject("metrics.attributes"); { b.field("type", "object"); @@ -605,15 +604,13 @@ public void testSubobjectsAutoRoot() throws Exception { assertNotNull(mapperService.documentMapper().mappers().objectMappers().get("metrics.attributes")); } - public void testSubobjectsAutoRootWithInnerObject() throws IOException { + public void testSubobjectsAutoRootWithInnerFlattenableObject() throws IOException { MapperService mapperService = createMapperService(mappingWithSubobjects(b -> { b.startObject("metrics.service.time"); { b.startObject("properties"); { - b.startObject("max"); - b.field("type", "long"); - b.endObject(); + b.startObject("max").field("type", "long").endObject(); } b.endObject(); } @@ -621,8 +618,48 @@ public void testSubobjectsAutoRootWithInnerObject() throws IOException { }, "auto")); assertNull(mapperService.fieldType("metrics.service.time")); assertNotNull(mapperService.fieldType("metrics.service.time.max")); - assertNotNull(mapperService.documentMapper().mappers().objectMappers().get("metrics.service.time")); - assertNotNull(mapperService.documentMapper().mappers().getMapper("metrics.service.time.max")); + assertNull(mapperService.documentMapper().mappers().objectMappers().get("metrics.service.time")); // Gets flattened. + + Mapper innerField = mapperService.documentMapper().mappers().getMapper("metrics.service.time.max"); + assertNotNull(innerField); + assertEquals("metrics.service.time.max", innerField.leafName()); + } + + public void testSubobjectsAutoRootWithInnerNonFlattenableObject() throws IOException { + MapperService mapperService = createMapperService(mappingWithSubobjects(b -> { + b.startObject("metrics").startObject("properties"); + { + b.startObject("service.time"); + { + b.field("subobjects", "auto"); + b.startObject("properties"); + { + b.startObject("path").startObject("properties"); + { + b.startObject("to").startObject("properties"); + { + b.startObject("max").field("type", "long").endObject(); + } + b.endObject().endObject(); + } + b.endObject().endObject(); + } + b.endObject(); + } + b.endObject(); + } + b.endObject().endObject(); + }, "auto")); + assertNull(mapperService.fieldType("metrics.service.time")); + assertNotNull(mapperService.fieldType("metrics.service.time.path.to.max")); + + ObjectMapper innerObject = mapperService.documentMapper().mappers().objectMappers().get("metrics.service.time"); // Not flattened. 
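// A minimal sketch of the path invariant the assertEquals calls that follow rely on
// (a hypothetical helper, not an Elasticsearch type): a mapper's full path is its
// parent's full path plus its leaf name, so once a parent object is auto-flattened
// away, its children keep longer dotted leaf names instead.
record MapperPathSketch(String parentPath, String leafName) {
    String fullPath() {
        return parentPath.isEmpty() ? leafName : parentPath + "." + leafName;
    }
}
// e.g. new MapperPathSketch("metrics.service.time", "path.to.max").fullPath()
// yields "metrics.service.time.path.to.max", matching the assertions below.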
+ assertNotNull(innerObject); + assertEquals("metrics.service.time", innerObject.leafName()); + + Mapper innerField = mapperService.documentMapper().mappers().getMapper("metrics.service.time.path.to.max"); + assertNotNull(innerField); + assertEquals("path.to.max", innerField.leafName()); } public void testSubobjectsAutoRootWithInnerNested() throws IOException { @@ -742,16 +779,7 @@ public void testFlatten() { ObjectMapper objectMapper = new ObjectMapper.Builder("parent", Optional.empty()).add( new ObjectMapper.Builder("child", Optional.empty()).add(new KeywordFieldMapper.Builder("keyword2", IndexVersion.current())) ).add(new KeywordFieldMapper.Builder("keyword1", IndexVersion.current())).build(rootContext); - List fields = objectMapper.asFlattenedFieldMappers(rootContext).stream().map(FieldMapper::fullPath).toList(); - assertThat(fields, containsInAnyOrder("parent.keyword1", "parent.child.keyword2")); - } - - public void testFlattenSubobjectsAuto() { - MapperBuilderContext rootContext = MapperBuilderContext.root(false, false); - ObjectMapper objectMapper = new ObjectMapper.Builder("parent", Optional.of(ObjectMapper.Subobjects.AUTO)).add( - new ObjectMapper.Builder("child", Optional.empty()).add(new KeywordFieldMapper.Builder("keyword2", IndexVersion.current())) - ).add(new KeywordFieldMapper.Builder("keyword1", IndexVersion.current())).build(rootContext); - List fields = objectMapper.asFlattenedFieldMappers(rootContext).stream().map(FieldMapper::fullPath).toList(); + List fields = objectMapper.asFlattenedFieldMappers(rootContext, true).stream().map(Mapper::fullPath).toList(); assertThat(fields, containsInAnyOrder("parent.keyword1", "parent.child.keyword2")); } @@ -760,7 +788,7 @@ public void testFlattenSubobjectsFalse() { ObjectMapper objectMapper = new ObjectMapper.Builder("parent", Optional.of(ObjectMapper.Subobjects.DISABLED)).add( new ObjectMapper.Builder("child", Optional.empty()).add(new KeywordFieldMapper.Builder("keyword2", IndexVersion.current())) ).add(new KeywordFieldMapper.Builder("keyword1", IndexVersion.current())).build(rootContext); - List fields = objectMapper.asFlattenedFieldMappers(rootContext).stream().map(FieldMapper::fullPath).toList(); + List fields = objectMapper.asFlattenedFieldMappers(rootContext, true).stream().map(Mapper::fullPath).toList(); assertThat(fields, containsInAnyOrder("parent.keyword1", "parent.child.keyword2")); } @@ -772,7 +800,7 @@ public void testFlattenDynamicIncompatible() { IllegalArgumentException exception = expectThrows( IllegalArgumentException.class, - () -> objectMapper.asFlattenedFieldMappers(rootContext) + () -> objectMapper.asFlattenedFieldMappers(rootContext, true) ); assertEquals( "Object mapper [parent.child] was found in a context where subobjects is set to false. " @@ -788,7 +816,7 @@ public void testFlattenEnabledFalse() { IllegalArgumentException exception = expectThrows( IllegalArgumentException.class, - () -> objectMapper.asFlattenedFieldMappers(rootContext) + () -> objectMapper.asFlattenedFieldMappers(rootContext, true) ); assertEquals( "Object mapper [parent] was found in a context where subobjects is set to false. 
" @@ -797,13 +825,30 @@ public void testFlattenEnabledFalse() { ); } + public void testFlattenSubobjectsAuto() { + MapperBuilderContext rootContext = MapperBuilderContext.root(false, false); + ObjectMapper objectMapper = new ObjectMapper.Builder("parent", Optional.of(ObjectMapper.Subobjects.AUTO)).add( + new ObjectMapper.Builder("child", Optional.empty()).add(new KeywordFieldMapper.Builder("keyword2", IndexVersion.current())) + ).add(new KeywordFieldMapper.Builder("keyword1", IndexVersion.current())).build(rootContext); + + IllegalArgumentException exception = expectThrows( + IllegalArgumentException.class, + () -> objectMapper.asFlattenedFieldMappers(rootContext, true) + ); + assertEquals( + "Object mapper [parent] was found in a context where subobjects is set to false. " + + "Auto-flattening [parent] failed because the value of [subobjects] is [auto]", + exception.getMessage() + ); + } + public void testFlattenExplicitSubobjectsTrue() { MapperBuilderContext rootContext = MapperBuilderContext.root(false, false); ObjectMapper objectMapper = new ObjectMapper.Builder("parent", Optional.of(ObjectMapper.Subobjects.ENABLED)).build(rootContext); IllegalArgumentException exception = expectThrows( IllegalArgumentException.class, - () -> objectMapper.asFlattenedFieldMappers(rootContext) + () -> objectMapper.asFlattenedFieldMappers(rootContext, true) ); assertEquals( "Object mapper [parent] was found in a context where subobjects is set to false. " diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardOperationPermitsTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardOperationPermitsTests.java index ba4d09566ef63..cb9927be732f6 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardOperationPermitsTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardOperationPermitsTests.java @@ -565,9 +565,10 @@ public void testAsyncBlockOperationsOnTimeout() { assertEquals( "timeout while blocking operations after [0s]", - asInstanceOf( + safeAwaitFailure( ElasticsearchTimeoutException.class, - safeAwaitFailure(Releasable.class, f -> permits.blockOperations(f, 0, TimeUnit.SECONDS, threadPool.generic())) + Releasable.class, + f -> permits.blockOperations(f, 0, TimeUnit.SECONDS, threadPool.generic()) ).getMessage() ); diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index ec70f1f7adcfd..f15506676dc39 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -2169,16 +2169,14 @@ public void testShardCanNotBeMarkedAsRelocatedIfRelocationCancelled() throws IOE final ShardRouting relocationRouting = ShardRoutingHelper.relocate(originalRouting, "other_node"); IndexShardTestCase.updateRoutingEntry(shard, relocationRouting); IndexShardTestCase.updateRoutingEntry(shard, originalRouting); - asInstanceOf( + safeAwaitFailure( IllegalIndexShardStateException.class, - safeAwaitFailure( - Void.class, - listener -> shard.relocated( - relocationRouting.relocatingNodeId(), - relocationRouting.getTargetRelocatingShard().allocationId().getId(), - (primaryContext, l) -> fail("should not be called"), - listener - ) + Void.class, + listener -> shard.relocated( + relocationRouting.relocatingNodeId(), + relocationRouting.getTargetRelocatingShard().allocationId().getId(), + (primaryContext, l) -> fail("should not be called"), + listener ) 
); closeShards(shard); @@ -2263,16 +2261,14 @@ public void testRelocateMismatchedTarget() throws Exception { final AtomicBoolean relocated = new AtomicBoolean(); - final IllegalIndexShardStateException wrongNodeException = asInstanceOf( + final IllegalIndexShardStateException wrongNodeException = safeAwaitFailure( IllegalIndexShardStateException.class, - safeAwaitFailure( - Void.class, - listener -> shard.relocated( - wrongTargetNodeShardRouting.relocatingNodeId(), - wrongTargetNodeShardRouting.getTargetRelocatingShard().allocationId().getId(), - (ctx, l) -> relocated.set(true), - listener - ) + Void.class, + listener -> shard.relocated( + wrongTargetNodeShardRouting.relocatingNodeId(), + wrongTargetNodeShardRouting.getTargetRelocatingShard().allocationId().getId(), + (ctx, l) -> relocated.set(true), + listener ) ); assertThat( @@ -2281,16 +2277,14 @@ public void testRelocateMismatchedTarget() throws Exception { ); assertFalse(relocated.get()); - final IllegalStateException wrongTargetIdException = asInstanceOf( + final IllegalStateException wrongTargetIdException = safeAwaitFailure( IllegalStateException.class, - safeAwaitFailure( - Void.class, - listener -> shard.relocated( - wrongTargetAllocationIdShardRouting.relocatingNodeId(), - wrongTargetAllocationIdShardRouting.getTargetRelocatingShard().allocationId().getId(), - (ctx, l) -> relocated.set(true), - listener - ) + Void.class, + listener -> shard.relocated( + wrongTargetAllocationIdShardRouting.relocatingNodeId(), + wrongTargetAllocationIdShardRouting.getTargetRelocatingShard().allocationId().getId(), + (ctx, l) -> relocated.set(true), + listener ) ); assertThat( @@ -4191,7 +4185,7 @@ protected void commitIndexWriter(final IndexWriter writer, final Translog transl ); shard.flushOnIdle(0); assertFalse(shard.isActive()); - assertBusy(mockLog::assertAllExpectationsMatched); + mockLog.awaitAllExpectationsMatched(); // While the first flush is happening, index one more doc (to turn the shard's active flag to true), // and issue a second flushOnIdle request which should not wait for the ongoing flush @@ -4206,7 +4200,7 @@ protected void commitIndexWriter(final IndexWriter writer, final Translog transl ) ); shard.flushOnIdle(0); - assertBusy(mockLog::assertAllExpectationsMatched); + mockLog.awaitAllExpectationsMatched(); // A direct call to flush (with waitIfOngoing=false) should not wait and return false immediately assertFalse(shard.flush(new FlushRequest().waitIfOngoing(false).force(false))); @@ -4223,7 +4217,7 @@ protected void commitIndexWriter(final IndexWriter writer, final Translog transl "released flush lock" ) ); - assertBusy(mockLog::assertAllExpectationsMatched); + mockLog.awaitAllExpectationsMatched(); // The second flushOnIdle (that did not happen) should have turned the active flag to true assertTrue(shard.isActive()); diff --git a/server/src/test/java/org/elasticsearch/index/snapshots/blobstore/SlicedInputStreamTests.java b/server/src/test/java/org/elasticsearch/index/snapshots/blobstore/SlicedInputStreamTests.java index 88e74c2bc5151..c31a68f36de71 100644 --- a/server/src/test/java/org/elasticsearch/index/snapshots/blobstore/SlicedInputStreamTests.java +++ b/server/src/test/java/org/elasticsearch/index/snapshots/blobstore/SlicedInputStreamTests.java @@ -10,6 +10,7 @@ import com.carrotsearch.randomizedtesting.generators.RandomNumbers; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.test.ESTestCase; import java.io.ByteArrayInputStream; @@ -18,11 +19,15 @@ import java.io.IOException; import 
java.io.InputStream; import java.util.ArrayList; +import java.util.Arrays; import java.util.Random; +import java.util.function.Consumer; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.in; public class SlicedInputStreamTests extends ESTestCase { + public void testReadRandom() throws IOException { int parts = randomIntBetween(1, 20); ByteArrayOutputStream stream = new ByteArrayOutputStream(); @@ -79,6 +84,42 @@ protected InputStream openSlice(int slice) throws IOException { } } + public void testSkip() throws IOException { + final int slices = randomIntBetween(1, 20); + final var bytes = randomByteArrayOfLength(randomIntBetween(1000, 10000)); + final int sliceSize = bytes.length / slices; + + final var streamsOpened = new ArrayList(); + SlicedInputStream input = new SlicedInputStream(slices) { + @Override + protected InputStream openSlice(int slice) throws IOException { + final int sliceOffset = slice * sliceSize; + final int length = slice == slices - 1 ? bytes.length - sliceOffset : sliceSize; + final var stream = new CheckClosedInputStream(new ByteArrayInputStream(bytes, sliceOffset, length)); + streamsOpened.add(stream); + return stream; + } + }; + + // Skip up to a random point + final int skip = randomIntBetween(0, bytes.length); + input.skipNBytes(skip); + + // Read all remaining bytes, which should be the bytes from skip up to the end + final int remainingBytes = bytes.length - skip; + if (remainingBytes > 0) { + final var remainingBytesRead = new byte[remainingBytes]; + input.readNBytes(remainingBytesRead, 0, remainingBytes); + final var expectedRemainingBytes = Arrays.copyOfRange(bytes, skip, bytes.length); + assertArrayEquals(expectedRemainingBytes, remainingBytesRead); + } + + // Confirm we reached the end and close the stream + assertThat(input.read(), equalTo(-1)); + input.close(); + streamsOpened.forEach(stream -> assertTrue(stream.closed)); + } + public void testRandomMarkReset() throws IOException { final int slices = randomIntBetween(1, 20); final var bytes = randomByteArrayOfLength(randomIntBetween(1000, 10000)); @@ -96,13 +137,17 @@ protected InputStream openSlice(int slice) throws IOException { } }; - // Read up to a random point + // Read or skip up to a random point final int mark = randomIntBetween(0, bytes.length); if (mark > 0) { - final var bytesReadUntilMark = new byte[mark]; - input.readNBytes(bytesReadUntilMark, 0, mark); - final var expectedBytesUntilMark = new ByteArrayInputStream(bytes, 0, mark).readAllBytes(); - assertArrayEquals(expectedBytesUntilMark, bytesReadUntilMark); + if (randomBoolean()) { + final var bytesReadUntilMark = new byte[mark]; + input.readNBytes(bytesReadUntilMark, 0, mark); + final var expectedBytesUntilMark = Arrays.copyOfRange(bytes, 0, mark); + assertArrayEquals(expectedBytesUntilMark, bytesReadUntilMark); + } else { + input.skipNBytes(mark); + } } // Reset should throw since there is no mark @@ -111,13 +156,22 @@ protected InputStream openSlice(int slice) throws IOException { // Mark input.mark(randomNonNegativeInt()); - // Read up to another random point + // Read or skip up to another random point final int moreBytes = randomIntBetween(0, bytes.length - mark); if (moreBytes > 0) { - final var moreBytesRead = new byte[moreBytes]; - input.readNBytes(moreBytesRead, 0, moreBytes); - final var expectedMoreBytes = new ByteArrayInputStream(bytes, mark, moreBytes).readAllBytes(); - assertArrayEquals(expectedMoreBytes, moreBytesRead); + if (randomBoolean()) { + final var moreBytesRead = new 
byte[moreBytes]; + input.readNBytes(moreBytesRead, 0, moreBytes); + final var expectedMoreBytes = Arrays.copyOfRange(bytes, mark, mark + moreBytes); + assertArrayEquals(expectedMoreBytes, moreBytesRead); + } else { + input.skipNBytes(moreBytes); + } + } + + // Randomly read to EOF + if (randomBoolean()) { + input.readAllBytes(); } // Reset @@ -128,7 +182,7 @@ protected InputStream openSlice(int slice) throws IOException { if (remainingBytes > 0) { final var remainingBytesRead = new byte[remainingBytes]; input.readNBytes(remainingBytesRead, 0, remainingBytes); - final var expectedRemainingBytes = new ByteArrayInputStream(bytes, mark, remainingBytes).readAllBytes(); + final var expectedRemainingBytes = Arrays.copyOfRange(bytes, mark, bytes.length); assertArrayEquals(expectedRemainingBytes, remainingBytesRead); } @@ -138,6 +192,67 @@ protected InputStream openSlice(int slice) throws IOException { streamsOpened.forEach(stream -> assertTrue(stream.closed)); } + public void testMarkSkipResetInBigSlice() throws IOException { + SlicedInputStream input = new SlicedInputStream(1) { + @Override + protected InputStream openSlice(int slice) throws IOException { + assertThat(slice, equalTo(0)); + return new IncreasingBytesUnlimitedInputStream(); + } + }; + + // Buffer to use for reading a few KiB from a start byte of IncreasingBytesUnlimitedInputStream, to verify expected bytes. + final byte[] buffer = new byte[Math.toIntExact(ByteSizeValue.ofKb(randomIntBetween(1, 8)).getBytes())]; + Consumer readAndAssert = (start) -> { + try { + final int read = input.read(buffer); + assertThat("Unexpected number of bytes read", read, equalTo(buffer.length)); + for (int i = 0; i < read; i++) { + assertThat("Unexpected value for startByte=" + start + " and i=" + i, buffer[i], equalTo((byte) ((start + i) % 255))); + } + } catch (IOException e) { + throw new AssertionError(e); + } + }; + + // Skip up to a random point that is larger than 2GiB so that the marked offset is larger than an int (ES-9639). 
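// A tiny self-contained illustration of the overflow being guarded against, with
// hypothetical values (not taken from the test): once the marked offset passes
// Integer.MAX_VALUE it no longer fits in an int, so it must be tracked as a long.
long sketchMark = 3_000_000_000L;       // > 2 GiB
int truncated = (int) sketchMark;       // -1294967296: silently negative
assert sketchMark > Integer.MAX_VALUE && truncated < 0;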
+ final long mark = randomLongBetween(Integer.MAX_VALUE, Long.MAX_VALUE - buffer.length); + input.skipNBytes(mark); + + // Mark + input.mark(randomNonNegativeInt()); + + // Skip a large amount of bytes + final long skipTo = randomLongBetween(mark, Long.MAX_VALUE - buffer.length); + input.skipNBytes(skipTo - mark); + + // Read a few KiB, asserting the bytes are what they are expected + readAndAssert.accept(skipTo); + + // Reset + input.reset(); + + // Read a few KiB, asserting the bytes are what they are expected + readAndAssert.accept(mark); + } + + public void testMarkBeyondEOF() throws IOException { + final int slices = randomIntBetween(1, 20); + SlicedInputStream input = new SlicedInputStream(slices) { + @Override + protected InputStream openSlice(int slice) throws IOException { + return new ByteArrayInputStream(new byte[] { 0 }, 0, 1); + } + }; + + input.readAllBytes(); + assertThat(input.read(), equalTo(-1)); + input.mark(randomNonNegativeInt()); + assertThat(input.read(), equalTo(-1)); + input.reset(); + assertThat(input.read(), equalTo(-1)); + } + public void testMarkResetClosedStream() throws IOException { final int slices = randomIntBetween(1, 20); SlicedInputStream input = new SlicedInputStream(slices) { @@ -147,7 +262,7 @@ protected InputStream openSlice(int slice) throws IOException { } }; - input.skip(randomIntBetween(1, slices)); + input.skipNBytes(randomIntBetween(1, slices)); input.mark(randomNonNegativeInt()); input.close(); // SlicedInputStream supports reading -1 after close without throwing @@ -232,4 +347,19 @@ public void close() throws IOException { super.close(); } } + + private static final class IncreasingBytesUnlimitedInputStream extends InputStream { + long currentByte = 0; + + @Override + public int read() throws IOException { + return (int) (currentByte++ % 255); + } + + @Override + public long skip(long n) throws IOException { + currentByte += n; + return n; + } + } } diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerSingleNodeTests.java b/server/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerSingleNodeTests.java index 46d03275ac3ce..619714119a05e 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerSingleNodeTests.java +++ b/server/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerSingleNodeTests.java @@ -128,6 +128,8 @@ public void afterIndexRemoved(Index index, IndexSettings indexSettings, IndexRem newRouting = ShardRoutingHelper.moveToStarted(newRouting); IndexShardTestCase.updateRoutingEntry(shard, newRouting); assertEquals(6, counter.get()); + } catch (Exception ex) { + logger.warn("unexpected exception", ex); } finally { indicesService.removeIndex(idx, DELETED, "simon says", EsExecutors.DIRECT_EXECUTOR_SERVICE, ActionListener.noop()); } diff --git a/server/src/test/java/org/elasticsearch/monitor/fs/FsHealthServiceTests.java b/server/src/test/java/org/elasticsearch/monitor/fs/FsHealthServiceTests.java index 05b71693a7fea..b644dfbc3a12c 100644 --- a/server/src/test/java/org/elasticsearch/monitor/fs/FsHealthServiceTests.java +++ b/server/src/test/java/org/elasticsearch/monitor/fs/FsHealthServiceTests.java @@ -146,7 +146,7 @@ public void testLoggingOnHungIO() throws Exception { disruptFileSystemProvider.injectIOException.set(true); fsHealthService.new FsHealthMonitor().run(); assertEquals(env.nodeDataPaths().length, disruptFileSystemProvider.getInjectedPathCount()); - assertBusy(mockLog::assertAllExpectationsMatched); + mockLog.awaitAllExpectationsMatched(); } 
finally { PathUtilsForTesting.teardown(); ThreadPool.terminate(testThreadPool, 500, TimeUnit.MILLISECONDS); diff --git a/server/src/test/java/org/elasticsearch/rest/RestCompatibleVersionHelperTests.java b/server/src/test/java/org/elasticsearch/rest/RestCompatibleVersionHelperTests.java index 104d578ef969b..040ab9fd5c2e9 100644 --- a/server/src/test/java/org/elasticsearch/rest/RestCompatibleVersionHelperTests.java +++ b/server/src/test/java/org/elasticsearch/rest/RestCompatibleVersionHelperTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.core.RestApiVersion; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.ParsedMediaType; import org.hamcrest.CustomTypeSafeMatcher; @@ -165,8 +164,6 @@ public void testAcceptAndContentTypeCombinations() { assertThat(requestWith(acceptHeader(null), contentTypeHeader("application/json"), bodyPresent()), not(isCompatible())); } - @UpdateForV9 - @AwaitsFix(bugUrl = "this can be re-enabled once our rest api version is bumped to V_9") public void testObsoleteVersion() { ElasticsearchStatusException e = expectThrows( ElasticsearchStatusException.class, @@ -213,14 +210,11 @@ public void testObsoleteVersion() { assertThat( e.getMessage(), equalTo( - "Content-Type version must be either version " - + CURRENT_VERSION - + " or " - + PREVIOUS_VERSION - + ", but found " - + OBSOLETE_VERSION - + ". " - + "Content-Type=" + "A compatible version is required on both Content-Type and Accept headers if either one has requested a " + + "compatible version and the compatible versions must match. " + + "Accept=" + + acceptHeader(PREVIOUS_VERSION) + + ", Content-Type=" + contentTypeHeader(OBSOLETE_VERSION) ) ); @@ -242,8 +236,8 @@ public void testMediaTypeCombinations() { assertThat( requestWith( - acceptHeader("application/vnd.elasticsearch+json;compatible-with=7"), - contentTypeHeader("application/vnd.elasticsearch+cbor;compatible-with=7"), + acceptHeader("application/vnd.elasticsearch+json;compatible-with=8"), + contentTypeHeader("application/vnd.elasticsearch+cbor;compatible-with=8"), bodyPresent() ), isCompatible() @@ -253,8 +247,8 @@ public void testMediaTypeCombinations() { expectThrows( ElasticsearchStatusException.class, () -> requestWith( - acceptHeader("application/vnd.elasticsearch+json;compatible-with=7"), - contentTypeHeader("application/vnd.elasticsearch+cbor;compatible-with=8"), + acceptHeader("application/vnd.elasticsearch+json;compatible-with=8"), + contentTypeHeader("application/vnd.elasticsearch+cbor;compatible-with=9"), bodyPresent() ) ); @@ -273,20 +267,20 @@ public void testTextMediaTypes() { // versioned assertThat( requestWith( - acceptHeader("text/vnd.elasticsearch+tab-separated-values;compatible-with=7"), - contentTypeHeader(7), + acceptHeader("text/vnd.elasticsearch+tab-separated-values;compatible-with=8"), + contentTypeHeader(8), bodyNotPresent() ), isCompatible() ); assertThat( - requestWith(acceptHeader("text/vnd.elasticsearch+plain;compatible-with=7"), contentTypeHeader(7), bodyNotPresent()), + requestWith(acceptHeader("text/vnd.elasticsearch+plain;compatible-with=8"), contentTypeHeader(8), bodyNotPresent()), isCompatible() ); assertThat( - requestWith(acceptHeader("text/vnd.elasticsearch+csv;compatible-with=7"), contentTypeHeader(7), bodyNotPresent()), + requestWith(acceptHeader("text/vnd.elasticsearch+csv;compatible-with=8"), contentTypeHeader(8), bodyNotPresent()), isCompatible() ); } diff --git 
a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestCreateIndexActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestCreateIndexActionTests.java index 601905635ff5e..2682a8c778168 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestCreateIndexActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestCreateIndexActionTests.java @@ -9,29 +9,15 @@ package org.elasticsearch.rest.action.admin.indices; -import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; -import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.core.RestApiVersion; -import org.elasticsearch.rest.RestRequest; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.rest.FakeRestRequest; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; -import org.elasticsearch.xcontent.XContentType; import java.io.IOException; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; import java.util.Map; -import static org.elasticsearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; -import static org.hamcrest.Matchers.equalTo; -import static org.mockito.Mockito.mock; - public class RestCreateIndexActionTests extends ESTestCase { public void testPrepareTypelessRequest() throws IOException { @@ -99,59 +85,4 @@ public void testMalformedMappings() throws IOException { Map source = RestCreateIndexAction.prepareMappings(contentAsMap); assertEquals(contentAsMap, source); } - - public void testIncludeTypeName() throws IOException { - RestCreateIndexAction action = new RestCreateIndexAction(); - List compatibleMediaType = Collections.singletonList(randomCompatibleMediaType(RestApiVersion.V_7)); - - Map params = new HashMap<>(); - params.put(INCLUDE_TYPE_NAME_PARAMETER, randomFrom("true", "false")); - RestRequest deprecatedRequest = new FakeRestRequest.Builder(xContentRegistry()).withHeaders(Map.of("Accept", compatibleMediaType)) - .withMethod(RestRequest.Method.PUT) - .withPath("/some_index") - .withParams(params) - .build(); - - action.prepareRequest(deprecatedRequest, mock(NodeClient.class)); - assertCriticalWarnings(RestCreateIndexAction.TYPES_DEPRECATION_MESSAGE); - - RestRequest validRequest = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.PUT) - .withPath("/some_index") - .build(); - action.prepareRequest(validRequest, mock(NodeClient.class)); - } - - public void testTypeInMapping() throws IOException { - RestCreateIndexAction action = new RestCreateIndexAction(); - - List contentTypeHeader = Collections.singletonList(compatibleMediaType(XContentType.VND_JSON, RestApiVersion.V_7)); - - String content = """ - { - "mappings": { - "some_type": { - "properties": { - "field1": { - "type": "text" - } - } - } - } - }"""; - - Map params = new HashMap<>(); - params.put(RestCreateIndexAction.INCLUDE_TYPE_NAME_PARAMETER, "true"); - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.PUT) - .withHeaders(Map.of("Content-Type", contentTypeHeader, "Accept", contentTypeHeader)) - .withPath("/some_index") - .withParams(params) - .withContent(new BytesArray(content), null) - .build(); - - CreateIndexRequest createIndexRequest = 
RestCreateIndexAction.prepareRequestV7(request); - // some_type is replaced with _doc - assertThat(createIndexRequest.mappings(), equalTo(""" - {"_doc":{"properties":{"field1":{"type":"text"}}}}""")); - assertCriticalWarnings(RestCreateIndexAction.TYPES_DEPRECATION_MESSAGE); - } } diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesActionTests.java deleted file mode 100644 index 33e7b7fa21382..0000000000000 --- a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesActionTests.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.rest.action.admin.indices; - -import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.rest.FakeRestRequest; - -import java.io.IOException; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import static org.elasticsearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; -import static org.mockito.Mockito.mock; - -public final class RestGetIndicesActionTests extends ESTestCase { - final List contentTypeHeader = Collections.singletonList(randomCompatibleMediaType(RestApiVersion.V_7)); - - /** - * Test that setting the "include_type_name" parameter raises a warning for the GET request - */ - public void testIncludeTypeNamesWarning() throws IOException { - Map params = new HashMap<>(); - params.put(INCLUDE_TYPE_NAME_PARAMETER, randomFrom("true", "false")); - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withHeaders( - Map.of("Content-Type", contentTypeHeader, "Accept", contentTypeHeader) - ).withMethod(RestRequest.Method.GET).withPath("/some_index").withParams(params).build(); - - RestGetIndicesAction handler = new RestGetIndicesAction(); - handler.prepareRequest(request, mock(NodeClient.class)); - assertCriticalWarnings(RestGetIndicesAction.TYPES_DEPRECATION_MESSAGE); - - // the same request without the parameter should pass without warning - request = new FakeRestRequest.Builder(xContentRegistry()).withHeaders( - Map.of("Content-Type", contentTypeHeader, "Accept", contentTypeHeader) - ).withMethod(RestRequest.Method.GET).withPath("/some_index").build(); - handler.prepareRequest(request, mock(NodeClient.class)); - } - - /** - * Test that setting the "include_type_name" parameter doesn't raises a warning if the HEAD method is used (indices.exists) - */ - public void testIncludeTypeNamesWarningExists() throws IOException { - Map params = new HashMap<>(); - params.put(INCLUDE_TYPE_NAME_PARAMETER, randomFrom("true", "false")); - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withHeaders( - Map.of("Content-Type", contentTypeHeader, "Accept", contentTypeHeader) - ).withMethod(RestRequest.Method.HEAD).withPath("/some_index").withParams(params).build(); - - 
RestGetIndicesAction handler = new RestGetIndicesAction(); - handler.prepareRequest(request, mock(NodeClient.class)); - } -} diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateActionTests.java deleted file mode 100644 index 5728e902aff6b..0000000000000 --- a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateActionTests.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.rest.action.admin.indices; - -import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.core.RestApiVersion; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.rest.FakeRestRequest; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentFactory; -import org.elasticsearch.xcontent.XContentType; -import org.junit.Before; - -import java.io.IOException; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import static org.elasticsearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; -import static org.mockito.Mockito.mock; - -public final class RestPutIndexTemplateActionTests extends ESTestCase { - final List contentTypeHeader = Collections.singletonList(compatibleMediaType(XContentType.VND_JSON, RestApiVersion.V_7)); - - private RestPutIndexTemplateAction action; - - @Before - public void setUpAction() { - action = new RestPutIndexTemplateAction(); - } - - public void testIncludeTypeName() throws IOException { - XContentBuilder typedContent = XContentFactory.jsonBuilder() - .startObject() - .startObject("mappings") - .startObject("my_doc") - .startObject("properties") - .startObject("field1") - .field("type", "keyword") - .endObject() - .startObject("field2") - .field("type", "text") - .endObject() - .endObject() - .endObject() - .endObject() - .startObject("aliases") - .startObject("read_alias") - .endObject() - .endObject() - .endObject(); - - Map params = new HashMap<>(); - params.put(INCLUDE_TYPE_NAME_PARAMETER, "true"); - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withHeaders( - Map.of("Content-Type", contentTypeHeader, "Accept", contentTypeHeader) - ) - .withMethod(RestRequest.Method.PUT) - .withParams(params) - .withPath("/_template/_some_template") - .withContent(BytesReference.bytes(typedContent), null) - .build(); - action.prepareRequest(request, mock(NodeClient.class)); - assertCriticalWarnings(RestPutIndexTemplateAction.TYPES_DEPRECATION_MESSAGE); - } -} diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java index 2d719c1ed537d..16e651a12c4d6 100644 --- 
a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java @@ -19,10 +19,7 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; -import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.search.AbstractSearchTestCase; @@ -41,7 +38,6 @@ import java.util.Collections; import java.util.HashMap; -import java.util.List; import java.util.Map; import static java.util.Collections.emptyMap; @@ -159,37 +155,4 @@ private RestRequest createRestRequest(String content) { .withContent(new BytesArray(content), XContentType.JSON) .build(); } - - public void testTypeInPath() { - List compatibleMediaType = Collections.singletonList(randomCompatibleMediaType(RestApiVersion.V_7)); - - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withHeaders(Map.of("Accept", compatibleMediaType)) - .withMethod(RestRequest.Method.GET) - .withPath("/some_index/some_type/_validate/query") - .build(); - - performRequest(request); - assertCriticalWarnings(RestValidateQueryAction.TYPES_DEPRECATION_MESSAGE); - } - - public void testTypeParameter() { - List compatibleMediaType = Collections.singletonList(randomCompatibleMediaType(RestApiVersion.V_7)); - - Map params = new HashMap<>(); - params.put("type", "some_type"); - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withHeaders(Map.of("Accept", compatibleMediaType)) - .withMethod(RestRequest.Method.GET) - .withPath("_validate/query") - .withParams(params) - .build(); - - performRequest(request); - assertCriticalWarnings(RestValidateQueryAction.TYPES_DEPRECATION_MESSAGE); - } - - private void performRequest(RestRequest request) { - RestChannel channel = new FakeRestChannel(request, false, 1); - ThreadContext threadContext = new ThreadContext(Settings.EMPTY); - controller.dispatchRequest(request, channel, threadContext); - } } diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestDeleteActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestDeleteActionTests.java deleted file mode 100644 index d9141002eb32c..0000000000000 --- a/server/src/test/java/org/elasticsearch/rest/action/document/RestDeleteActionTests.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ - -package org.elasticsearch.rest.action.document; - -import org.elasticsearch.action.delete.DeleteResponse; -import org.elasticsearch.core.RestApiVersion; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.test.rest.FakeRestRequest; -import org.elasticsearch.test.rest.RestActionTestCase; -import org.junit.Before; -import org.mockito.Mockito; - -import java.util.Collections; -import java.util.List; -import java.util.Map; - -public final class RestDeleteActionTests extends RestActionTestCase { - - final List contentTypeHeader = Collections.singletonList(randomCompatibleMediaType(RestApiVersion.V_7)); - - @Before - public void setUpAction() { - controller().registerHandler(new RestDeleteAction()); - verifyingClient.setExecuteVerifier((actionType, request) -> Mockito.mock(DeleteResponse.class)); - verifyingClient.setExecuteLocallyVerifier((actionType, request) -> Mockito.mock(DeleteResponse.class)); - } - - public void testTypeInPath() { - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withHeaders(Map.of("Accept", contentTypeHeader)) - .withMethod(RestRequest.Method.DELETE) - .withPath("/some_index/some_type/some_id") - .build(); - dispatchRequest(request); - assertCriticalWarnings(RestDeleteAction.TYPES_DEPRECATION_MESSAGE); - - RestRequest validRequest = new FakeRestRequest.Builder(xContentRegistry()).withHeaders(Map.of("Accept", contentTypeHeader)) - .withMethod(RestRequest.Method.DELETE) - .withPath("/some_index/_doc/some_id") - .build(); - dispatchRequest(validRequest); - } -} diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestGetActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestGetActionTests.java deleted file mode 100644 index 0a9abacd82635..0000000000000 --- a/server/src/test/java/org/elasticsearch/rest/action/document/RestGetActionTests.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ - -package org.elasticsearch.rest.action.document; - -import org.elasticsearch.action.get.GetRequest; -import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.core.RestApiVersion; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.test.rest.FakeRestRequest; -import org.elasticsearch.test.rest.RestActionTestCase; -import org.junit.Before; -import org.mockito.Mockito; - -import java.util.Collections; -import java.util.List; -import java.util.Map; - -import static org.hamcrest.Matchers.instanceOf; - -public final class RestGetActionTests extends RestActionTestCase { - final List contentTypeHeader = Collections.singletonList(randomCompatibleMediaType(RestApiVersion.V_7)); - - @Before - public void setUpAction() { - controller().registerHandler(new RestGetAction()); - verifyingClient.setExecuteVerifier((actionType, request) -> { - assertThat(request, instanceOf(GetRequest.class)); - return Mockito.mock(GetResponse.class); - }); - } - - public void testTypeInPath() { - testTypeInPath(RestRequest.Method.GET); - testTypeInPath(RestRequest.Method.HEAD); - } - - private void testTypeInPath(RestRequest.Method method) { - FakeRestRequest.Builder deprecatedRequest = new FakeRestRequest.Builder(xContentRegistry()).withHeaders( - Map.of("Content-Type", contentTypeHeader, "Accept", contentTypeHeader) - ).withPath("/some_index/some_type/some_id"); - dispatchRequest(deprecatedRequest.withMethod(method).build()); - assertCriticalWarnings(RestGetAction.TYPES_DEPRECATION_MESSAGE); - } -} diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestGetSourceActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestGetSourceActionTests.java index 17840990d5b10..7cec10299280e 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/document/RestGetSourceActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/document/RestGetSourceActionTests.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; @@ -26,12 +25,6 @@ import org.junit.Before; import org.mockito.Mockito; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - import static java.util.Collections.emptyMap; import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; import static org.elasticsearch.rest.RestStatus.OK; @@ -43,7 +36,6 @@ public final class RestGetSourceActionTests extends RestActionTestCase { private static RestRequest request = new FakeRestRequest(); private static FakeRestChannel channel = new FakeRestChannel(request, true, 0); private static RestGetSourceResponseListener listener = new RestGetSourceResponseListener(channel, request); - private final List compatibleMediaType = Collections.singletonList(randomCompatibleMediaType(RestApiVersion.V_7)); @Before public void setUpAction() { @@ -89,36 +81,4 @@ public void testRestGetSourceActionWithMissingDocumentSource() { assertThat(exception.getMessage(), equalTo("Source not found [index1]/[1]")); } - - /** - * test deprecation is logged if type is used in path - */ - public void testTypeInPath() { - for (RestRequest.Method method : Arrays.asList(RestRequest.Method.GET, RestRequest.Method.HEAD)) 
{ - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withHeaders(Map.of("Accept", compatibleMediaType)) - .withMethod(method) - .withPath("/some_index/some_type/id/_source") - .build(); - dispatchRequest(request); - assertCriticalWarnings(RestGetSourceAction.TYPES_DEPRECATION_MESSAGE); - } - } - - /** - * test deprecation is logged if type is used as parameter - */ - public void testTypeParameter() { - Map params = new HashMap<>(); - params.put("type", "some_type"); - for (RestRequest.Method method : Arrays.asList(RestRequest.Method.GET, RestRequest.Method.HEAD)) { - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withHeaders(Map.of("Accept", compatibleMediaType)) - .withMethod(method) - .withPath("/some_index/_source/id") - .withParams(params) - .build(); - dispatchRequest(request); - assertCriticalWarnings(RestGetSourceAction.TYPES_DEPRECATION_MESSAGE); - } - } - } diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestIndexActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestIndexActionTests.java index b7f0fa3c1c707..1aa53382666ef 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/document/RestIndexActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/document/RestIndexActionTests.java @@ -19,7 +19,6 @@ import org.elasticsearch.cluster.node.DiscoveryNodeUtils; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.document.RestIndexAction.AutoIdHandler; @@ -29,18 +28,12 @@ import org.elasticsearch.xcontent.XContentType; import org.junit.Before; -import java.util.Collections; -import java.util.List; -import java.util.Map; import java.util.concurrent.atomic.AtomicReference; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; public final class RestIndexActionTests extends RestActionTestCase { - - final List contentTypeHeader = Collections.singletonList(randomCompatibleMediaType(RestApiVersion.V_7)); - private final AtomicReference clusterStateSupplier = new AtomicReference<>(); @Before @@ -85,34 +78,4 @@ private void checkAutoIdOpType(Version minClusterVersion, DocWriteRequest.OpType dispatchRequest(autoIdRequest); assertThat(executeCalled.get(), equalTo(true)); } - - public void testTypeInPath() { - // using CompatibleRestIndexAction - RestRequest deprecatedRequest = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.PUT) - .withHeaders(Map.of("Content-Type", contentTypeHeader, "Accept", contentTypeHeader)) - .withPath("/some_index/some_type/some_id") - .build(); - dispatchRequest(deprecatedRequest); - assertCriticalWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE); - } - - public void testCreateWithTypeInPath() { - // using CompatibleCreateHandler - RestRequest deprecatedRequest = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.PUT) - .withHeaders(Map.of("Content-Type", contentTypeHeader, "Accept", contentTypeHeader)) - .withPath("/some_index/some_type/some_id/_create") - .build(); - dispatchRequest(deprecatedRequest); - assertCriticalWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE); - } - - public void testAutoIdWithType() { - // using CompatibleAutoIdHandler - RestRequest deprecatedRequest = new 
FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.POST) - .withHeaders(Map.of("Content-Type", contentTypeHeader, "Accept", contentTypeHeader)) - .withPath("/some_index/some_type/") - .build(); - dispatchRequest(deprecatedRequest); - assertCriticalWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE); - } } diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestMultiGetActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestMultiGetActionTests.java deleted file mode 100644 index ed793f3127a93..0000000000000 --- a/server/src/test/java/org/elasticsearch/rest/action/document/RestMultiGetActionTests.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.rest.action.document; - -import org.elasticsearch.action.get.MultiGetRequest; -import org.elasticsearch.action.get.MultiGetResponse; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.RestApiVersion; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.test.rest.FakeRestRequest; -import org.elasticsearch.test.rest.RestActionTestCase; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentFactory; -import org.elasticsearch.xcontent.XContentType; -import org.junit.Before; -import org.mockito.Mockito; - -import java.util.Collections; -import java.util.List; -import java.util.Map; - -import static org.hamcrest.Matchers.instanceOf; - -public final class RestMultiGetActionTests extends RestActionTestCase { - XContentType VND_TYPE = randomVendorType(); - List contentTypeHeader = Collections.singletonList(compatibleMediaType(VND_TYPE, RestApiVersion.V_7)); - - @Before - public void setUpAction() { - controller().registerHandler(new RestMultiGetAction(Settings.EMPTY)); - verifyingClient.setExecuteVerifier((actionType, request) -> { - assertThat(request, instanceOf(MultiGetRequest.class)); - return Mockito.mock(MultiGetResponse.class); - }); - } - - public void testTypeInPath() { - RestRequest deprecatedRequest = new FakeRestRequest.Builder(xContentRegistry()).withHeaders( - Map.of("Content-Type", contentTypeHeader, "Accept", contentTypeHeader) - ).withMethod(RestRequest.Method.GET).withPath("some_index/some_type/_mget").build(); - dispatchRequest(deprecatedRequest); - assertCriticalWarnings(RestMultiGetAction.TYPES_DEPRECATION_MESSAGE); - } - - public void testTypeInBody() throws Exception { - XContentBuilder content = XContentFactory.contentBuilder(VND_TYPE) - .startObject() - .startArray("docs") - .startObject() - .field("_index", "some_index") - .field("_type", "_doc") - .field("_id", "2") - .endObject() - .startObject() - .field("_index", "test") - .field("_id", "2") - .endObject() - .endArray() - .endObject(); - - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withPath("_mget") - .withHeaders(Map.of("Content-Type", contentTypeHeader, "Accept", contentTypeHeader)) - .withContent(BytesReference.bytes(content), null) - .build(); - 
dispatchRequest(request); - assertCriticalWarnings(RestMultiGetAction.TYPES_DEPRECATION_MESSAGE); - } - -} diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsActionTests.java deleted file mode 100644 index 0e247d70b2ba3..0000000000000 --- a/server/src/test/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsActionTests.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.rest.action.document; - -import org.elasticsearch.action.termvectors.MultiTermVectorsResponse; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.core.RestApiVersion; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.test.rest.FakeRestRequest; -import org.elasticsearch.test.rest.RestActionTestCase; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentFactory; -import org.elasticsearch.xcontent.XContentType; -import org.junit.Before; -import org.mockito.Mockito; - -import java.io.IOException; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -public final class RestMultiTermVectorsActionTests extends RestActionTestCase { - final List contentTypeHeader = Collections.singletonList(compatibleMediaType(XContentType.VND_JSON, RestApiVersion.V_7)); - - @Before - public void setUpAction() { - controller().registerHandler(new RestMultiTermVectorsAction()); - verifyingClient.setExecuteVerifier((actionType, request) -> Mockito.mock(MultiTermVectorsResponse.class)); - verifyingClient.setExecuteLocallyVerifier((actionType, request) -> Mockito.mock(MultiTermVectorsResponse.class)); - } - - public void testTypeInPath() { - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withHeaders( - Map.of("Content-Type", contentTypeHeader, "Accept", contentTypeHeader) - ).withMethod(RestRequest.Method.POST).withPath("/some_index/some_type/_mtermvectors").build(); - - dispatchRequest(request); - assertCriticalWarnings(RestMultiTermVectorsAction.TYPES_DEPRECATION_MESSAGE); - } - - public void testTypeParameter() { - Map params = new HashMap<>(); - params.put("type", "some_type"); - - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withHeaders( - Map.of("Content-Type", contentTypeHeader, "Accept", contentTypeHeader) - ).withPath("/some_index/_mtermvectors").withParams(params).build(); - - dispatchRequest(request); - assertCriticalWarnings(RestMultiTermVectorsAction.TYPES_DEPRECATION_MESSAGE); - } - - public void testTypeInBody() throws IOException { - XContentBuilder content = XContentFactory.jsonBuilder() - .startObject() - .startArray("docs") - .startObject() - .field("_type", "some_type") - .field("_id", 1) - .endObject() - .endArray() - .endObject(); - - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withHeaders( - Map.of("Content-Type", contentTypeHeader, "Accept", contentTypeHeader) - ) - 
.withMethod(RestRequest.Method.POST) - .withPath("/some_index/_mtermvectors") - .withContent(BytesReference.bytes(content), null) - .build(); - - dispatchRequest(request); - assertCriticalWarnings(RestTermVectorsAction.TYPES_DEPRECATION_MESSAGE); - } -} diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestTermVectorsActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestTermVectorsActionTests.java deleted file mode 100644 index a69c167c12729..0000000000000 --- a/server/src/test/java/org/elasticsearch/rest/action/document/RestTermVectorsActionTests.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.rest.action.document; - -import org.elasticsearch.action.termvectors.TermVectorsResponse; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.core.RestApiVersion; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.test.rest.FakeRestRequest; -import org.elasticsearch.test.rest.RestActionTestCase; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentFactory; -import org.elasticsearch.xcontent.XContentType; -import org.junit.Before; -import org.mockito.Mockito; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Map; - -public final class RestTermVectorsActionTests extends RestActionTestCase { - final List contentTypeHeader = Collections.singletonList(compatibleMediaType(XContentType.VND_JSON, RestApiVersion.V_7)); - - @Before - public void setUpAction() { - controller().registerHandler(new RestTermVectorsAction()); - // todo how to workaround this? 
we get AssertionError without this - verifyingClient.setExecuteVerifier((actionType, request) -> Mockito.mock(TermVectorsResponse.class)); - verifyingClient.setExecuteLocallyVerifier((actionType, request) -> Mockito.mock(TermVectorsResponse.class)); - } - - public void testTypeInPath() { - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withHeaders( - Map.of("Content-Type", contentTypeHeader, "Accept", contentTypeHeader) - ).withMethod(RestRequest.Method.POST).withPath("/some_index/some_type/some_id/_termvectors").build(); - - dispatchRequest(request); - assertCriticalWarnings(RestTermVectorsAction.TYPES_DEPRECATION_MESSAGE); - } - - public void testTypeInBody() throws IOException { - XContentBuilder content = XContentFactory.jsonBuilder().startObject().field("_type", "some_type").field("_id", 1).endObject(); - - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withHeaders( - Map.of("Content-Type", contentTypeHeader, "Accept", contentTypeHeader) - ) - .withMethod(RestRequest.Method.GET) - .withPath("/some_index/_termvectors/some_id") - .withContent(BytesReference.bytes(content), null) - .build(); - - dispatchRequest(request); - assertCriticalWarnings(RestTermVectorsAction.TYPES_DEPRECATION_MESSAGE); - } -} diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestUpdateActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestUpdateActionTests.java index def6e8eb0375d..c68867649e25a 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/document/RestUpdateActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/document/RestUpdateActionTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.VersionType; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.test.rest.FakeRestRequest; @@ -22,17 +21,13 @@ import org.junit.Before; import org.mockito.Mockito; -import java.util.Collections; import java.util.HashMap; -import java.util.List; import java.util.Map; import static org.hamcrest.CoreMatchers.containsString; import static org.mockito.Mockito.mock; public final class RestUpdateActionTests extends RestActionTestCase { - final List contentTypeHeader = Collections.singletonList(randomCompatibleMediaType(RestApiVersion.V_7)); - private RestUpdateAction action; @Before @@ -76,17 +71,4 @@ public void testUpdateDocVersion() { ) ); } - - public void testTypeInPath() { - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withHeaders( - Map.of("Content-Type", contentTypeHeader, "Accept", contentTypeHeader) - ).withMethod(RestRequest.Method.POST).withPath("/some_index/some_type/some_id/_update").build(); - dispatchRequest(request); - assertCriticalWarnings(RestUpdateAction.TYPES_DEPRECATION_MESSAGE); - - RestRequest validRequest = new FakeRestRequest.Builder(xContentRegistry()).withHeaders( - Map.of("Content-Type", contentTypeHeader, "Accept", contentTypeHeader) - ).withMethod(RestRequest.Method.DELETE).withPath("/some_index/_update/some_id").build(); - dispatchRequest(validRequest); - } } diff --git a/server/src/test/java/org/elasticsearch/rest/action/search/RestCountActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/search/RestCountActionTests.java deleted file mode 100644 index e72511989f083..0000000000000 --- 
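All of the test classes deleted above share one setup idiom: `RestActionTestCase` exposes a `verifyingClient` whose execute verifiers are swapped for functions that return Mockito mocks, so `dispatchRequest` never needs a real transport. A minimal sketch of that skeleton, reconstructed from the removed `setUpAction` methods (the class name is illustrative; any of the deleted actions and their response types can be substituted):

```java
import org.elasticsearch.action.termvectors.TermVectorsResponse;
import org.elasticsearch.rest.action.document.RestTermVectorsAction;
import org.elasticsearch.test.rest.RestActionTestCase;
import org.junit.Before;
import org.mockito.Mockito;

public final class ExampleRestActionTests extends RestActionTestCase {
    @Before
    public void setUpAction() {
        // Register the handler under test with the test REST controller.
        controller().registerHandler(new RestTermVectorsAction());
        // Stub both execution paths; each verifier just returns a mocked response,
        // so dispatching a request exercises only the REST layer.
        verifyingClient.setExecuteVerifier((actionType, request) -> Mockito.mock(TermVectorsResponse.class));
        verifyingClient.setExecuteLocallyVerifier((actionType, request) -> Mockito.mock(TermVectorsResponse.class));
    }
}
```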
a/server/src/test/java/org/elasticsearch/rest/action/search/RestCountActionTests.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.rest.action.search; - -import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.core.RestApiVersion; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestRequest.Method; -import org.elasticsearch.test.rest.FakeRestRequest; -import org.elasticsearch.test.rest.RestActionTestCase; -import org.junit.Before; -import org.mockito.Mockito; - -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import static org.hamcrest.Matchers.instanceOf; - -public final class RestCountActionTests extends RestActionTestCase { - - final List contentTypeHeader = Collections.singletonList(randomCompatibleMediaType(RestApiVersion.V_7)); - - @Before - public void setUpAction() { - controller().registerHandler(new RestCountAction()); - verifyingClient.setExecuteVerifier((actionType, request) -> { - assertThat(request, instanceOf(SearchRequest.class)); - return Mockito.mock(SearchResponse.class); - }); - } - - public void testTypeInPath() { - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withHeaders(Map.of("Accept", contentTypeHeader)) - .withMethod(Method.POST) - .withPath("/some_index/some_type/_count") - .build(); - - dispatchRequest(request); - assertCriticalWarnings(RestCountAction.TYPES_DEPRECATION_MESSAGE); - } - - public void testTypeParameter() { - Map params = new HashMap<>(); - params.put("type", "some_type"); - - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withHeaders(Map.of("Accept", contentTypeHeader)) - .withMethod(Method.GET) - .withPath("/some_index/_count") - .withParams(params) - .build(); - - dispatchRequest(request); - assertCriticalWarnings(RestCountAction.TYPES_DEPRECATION_MESSAGE); - } -} diff --git a/server/src/test/java/org/elasticsearch/rest/action/search/RestExplainActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/search/RestExplainActionTests.java deleted file mode 100644 index ddbe7243d304b..0000000000000 --- a/server/src/test/java/org/elasticsearch/rest/action/search/RestExplainActionTests.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ - -package org.elasticsearch.rest.action.search; - -import org.elasticsearch.action.explain.ExplainResponse; -import org.elasticsearch.core.RestApiVersion; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.test.rest.FakeRestRequest; -import org.elasticsearch.test.rest.RestActionTestCase; -import org.elasticsearch.xcontent.XContentType; -import org.junit.Before; -import org.mockito.Mockito; - -import java.util.Collections; -import java.util.List; -import java.util.Map; - -public final class RestExplainActionTests extends RestActionTestCase { - final List contentTypeHeader = Collections.singletonList(compatibleMediaType(XContentType.VND_JSON, RestApiVersion.V_7)); - - @Before - public void setUpAction() { - RestExplainAction action = new RestExplainAction(); - controller().registerHandler(action); - verifyingClient.setExecuteVerifier((actionType, request) -> Mockito.mock(ExplainResponse.class)); - verifyingClient.setExecuteLocallyVerifier((actionType, request) -> Mockito.mock(ExplainResponse.class)); - } - - public void testTypeInPath() { - RestRequest deprecatedRequest = new FakeRestRequest.Builder(xContentRegistry()).withHeaders(Map.of("Accept", contentTypeHeader)) - .withMethod(RestRequest.Method.GET) - .withPath("/some_index/some_type/some_id/_explain") - .build(); - dispatchRequest(deprecatedRequest); - assertCriticalWarnings(RestExplainAction.TYPES_DEPRECATION_MESSAGE); - - RestRequest validRequest = new FakeRestRequest.Builder(xContentRegistry()).withHeaders(Map.of("Accept", contentTypeHeader)) - .withMethod(RestRequest.Method.GET) - .withPath("/some_index/_explain/some_id") - .build(); - dispatchRequest(validRequest); - } - -} diff --git a/server/src/test/java/org/elasticsearch/rest/action/search/RestMultiSearchActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/search/RestMultiSearchActionTests.java deleted file mode 100644 index b77817e4c0258..0000000000000 --- a/server/src/test/java/org/elasticsearch/rest/action/search/RestMultiSearchActionTests.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
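Beyond the setup, every deleted test follows the same three-step body: build a `FakeRestRequest` carrying a v7 compatible media type, dispatch it against a typed URL (or a `type` parameter or body field), and assert the action's critical deprecation warning. A condensed sketch of that shared skeleton, reconstructed from the removed `testTypeInPath` methods; the path and message constant vary per action:

```java
import org.elasticsearch.core.RestApiVersion;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.action.search.RestExplainAction;
import org.elasticsearch.test.rest.FakeRestRequest;
import org.elasticsearch.xcontent.XContentType;

import java.util.List;
import java.util.Map;

// Inside a RestActionTestCase subclass:
public void testTypeInPath() {
    // Renders as something like "application/vnd.elasticsearch+json;compatible-with=7".
    List<String> contentTypeHeader = List.of(compatibleMediaType(XContentType.VND_JSON, RestApiVersion.V_7));

    RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withHeaders(Map.of("Accept", contentTypeHeader))
        .withMethod(RestRequest.Method.GET)
        .withPath("/some_index/some_type/some_id/_explain")
        .build();

    dispatchRequest(request);
    // Typed URLs are only accepted through the v7 compatibility layer and emit a critical warning.
    assertCriticalWarnings(RestExplainAction.TYPES_DEPRECATION_MESSAGE);
}
```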
- */ - -package org.elasticsearch.rest.action.search; - -import org.elasticsearch.action.search.MultiSearchResponse; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.RestApiVersion; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.test.rest.FakeRestRequest; -import org.elasticsearch.test.rest.RestActionTestCase; -import org.elasticsearch.usage.UsageService; -import org.elasticsearch.xcontent.XContentType; -import org.junit.Before; - -import java.nio.charset.StandardCharsets; -import java.util.Collections; -import java.util.List; -import java.util.Map; - -import static org.mockito.Mockito.mock; - -public final class RestMultiSearchActionTests extends RestActionTestCase { - final List contentTypeHeader = Collections.singletonList(compatibleMediaType(XContentType.VND_JSON, RestApiVersion.V_7)); - - @Before - public void setUpAction() { - RestMultiSearchAction action = new RestMultiSearchAction(Settings.EMPTY, new UsageService().getSearchUsageHolder(), nf -> false); - controller().registerHandler(action); - verifyingClient.setExecuteVerifier((actionType, request) -> mock(MultiSearchResponse.class)); - verifyingClient.setExecuteLocallyVerifier((actionType, request) -> mock(MultiSearchResponse.class)); - } - - public void testTypeInPath() { - String content = "{ \"index\": \"some_index\" } \n {} \n"; - BytesArray bytesContent = new BytesArray(content.getBytes(StandardCharsets.UTF_8)); - - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withHeaders( - Map.of("Content-Type", contentTypeHeader, "Accept", contentTypeHeader) - ).withMethod(RestRequest.Method.GET).withPath("/some_index/some_type/_msearch").withContent(bytesContent, null).build(); - - dispatchRequest(request); - assertCriticalWarnings(RestMultiSearchAction.TYPES_DEPRECATION_MESSAGE); - } - - public void testTypeInBody() { - String content = "{ \"index\": \"some_index\", \"type\": \"some_type\" } \n {} \n"; - BytesArray bytesContent = new BytesArray(content.getBytes(StandardCharsets.UTF_8)); - - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withHeaders( - Map.of("Content-Type", contentTypeHeader, "Accept", contentTypeHeader) - ).withMethod(RestRequest.Method.POST).withPath("/some_index/_msearch").withContent(bytesContent, null).build(); - - dispatchRequest(request); - assertCriticalWarnings(RestMultiSearchAction.TYPES_DEPRECATION_MESSAGE); - } - -} diff --git a/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java index e207d150ac6cd..24f59a8c3abe7 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.suggest.SuggestBuilder; @@ -23,7 +22,6 @@ import org.elasticsearch.usage.UsageService; import org.junit.Before; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -31,8 +29,6 @@ import static org.mockito.Mockito.mock; public final 
class RestSearchActionTests extends RestActionTestCase { - final List contentTypeHeader = Collections.singletonList(randomCompatibleMediaType(RestApiVersion.V_7)); - private RestSearchAction action; @Before @@ -43,27 +39,6 @@ public void setUpAction() { verifyingClient.setExecuteLocallyVerifier((actionType, request) -> mock(SearchResponse.class)); } - public void testTypeInPath() { - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withHeaders( - Map.of("Content-Type", contentTypeHeader, "Accept", contentTypeHeader) - ).withMethod(RestRequest.Method.GET).withPath("/some_index/some_type/_search").build(); - - dispatchRequest(request); - assertCriticalWarnings(RestSearchAction.TYPES_DEPRECATION_MESSAGE); - } - - public void testTypeParameter() { - Map params = new HashMap<>(); - params.put("type", "some_type"); - - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withHeaders( - Map.of("Content-Type", contentTypeHeader, "Accept", contentTypeHeader) - ).withMethod(RestRequest.Method.GET).withPath("/some_index/_search").withParams(params).build(); - - dispatchRequest(request); - assertCriticalWarnings(RestSearchAction.TYPES_DEPRECATION_MESSAGE); - } - /** * The "enable_fields_emulation" flag on search requests is a no-op but should not raise an error */ @@ -71,9 +46,10 @@ public void testEnableFieldsEmulationNoErrors() throws Exception { Map params = new HashMap<>(); params.put("enable_fields_emulation", "true"); - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withHeaders( - Map.of("Content-Type", contentTypeHeader, "Accept", contentTypeHeader) - ).withMethod(RestRequest.Method.GET).withPath("/some_index/_search").withParams(params).build(); + RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET) + .withPath("/some_index/_search") + .withParams(params) + .build(); action.handleRequest(request, new FakeRestChannel(request, false, 1), verifyingClient); } @@ -83,9 +59,10 @@ public void testValidateSearchRequest() { Map params = new HashMap<>(); params.put("rest_total_hits_as_int", "true"); - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withHeaders( - Map.of("Content-Type", contentTypeHeader, "Accept", contentTypeHeader) - ).withMethod(RestRequest.Method.GET).withPath("/some_index/_search").withParams(params).build(); + RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET) + .withPath("/some_index/_search") + .withParams(params) + .build(); SearchRequest searchRequest = new SearchRequest(); searchRequest.source(new SearchSourceBuilder().trackTotalHitsUpTo(100)); @@ -100,9 +77,10 @@ public void testValidateSearchRequest() { Map params = new HashMap<>(); params.put("search_type", randomFrom(SearchType.values()).name()); - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withHeaders( - Map.of("Content-Type", contentTypeHeader, "Accept", contentTypeHeader) - ).withMethod(RestRequest.Method.GET).withPath("/some_index/_search").withParams(params).build(); + RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET) + .withPath("/some_index/_search") + .withParams(params) + .build(); SearchRequest searchRequest = new SearchRequest(); KnnSearchBuilder knnSearch = new KnnSearchBuilder("vector", new float[] { 1, 1, 1 }, 10, 100, null); @@ -126,9 +104,10 @@ public void testIllegalSearchType() { Map params = new HashMap<>(); params.put("search_type", 
"some_search_type"); - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withHeaders( - Map.of("Content-Type", contentTypeHeader, "Accept", contentTypeHeader) - ).withMethod(RestRequest.Method.GET).withPath("/some_index/_search").withParams(params).build(); + RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET) + .withPath("/some_index/_search") + .withParams(params) + .build(); Exception ex = expectThrows(IllegalArgumentException.class, () -> action.prepareRequest(request, verifyingClient)); assertEquals("No search type for [some_search_type]", ex.getMessage()); diff --git a/server/src/test/java/org/elasticsearch/search/SearchModuleTests.java b/server/src/test/java/org/elasticsearch/search/SearchModuleTests.java index 0a3c2c939b456..9109cd6b89bed 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchModuleTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchModuleTests.java @@ -18,12 +18,10 @@ import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.query.AbstractQueryBuilder; -import org.elasticsearch.index.query.CommonTermsQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.query.TermQueryBuilder; -import org.elasticsearch.index.query.TypeQueryV7Builder; import org.elasticsearch.index.query.functionscore.GaussDecayFunctionBuilder; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.aggregations.AggregationBuilder; @@ -33,7 +31,6 @@ import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.heuristic.ChiSquare; import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.MovAvgPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.CoreValuesSourceType; @@ -60,7 +57,6 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; @@ -301,7 +297,6 @@ public void testRegisteredQueries() { List allSupportedQueries = new ArrayList<>(); Collections.addAll(allSupportedQueries, NON_DEPRECATED_QUERIES); Collections.addAll(allSupportedQueries, DEPRECATED_QUERIES); - Collections.addAll(allSupportedQueries, REST_COMPATIBLE_QUERIES); SearchModule module = new SearchModule(Settings.EMPTY, emptyList()); @@ -471,11 +466,6 @@ public CheckedBiConsumer getReque // add here deprecated queries to make sure we log a deprecation warnings when they are used private static final String[] DEPRECATED_QUERIES = new String[] { "field_masking_span", "geo_polygon" }; - private static final String[] REST_COMPATIBLE_QUERIES = new String[] { - TypeQueryV7Builder.NAME_V7.getPreferredName(), - CommonTermsQueryBuilder.NAME_V7.getPreferredName() }; - private static final String[] REST_COMPATIBLE_AGGREGATIONS = new String[] { - 
MovAvgPipelineAggregationBuilder.NAME_V7.getPreferredName() }; /** * Dummy test {@link AggregationBuilder} used to test registering aggregation builders. @@ -692,58 +682,6 @@ public String getWriteableName() { } } - static class CompatQueryBuilder extends DummyQueryBuilder { - public static final String NAME = "compat_name"; - public static final ParseField NAME_OLD = new ParseField(NAME).forRestApiVersion( - RestApiVersion.equalTo(RestApiVersion.minimumSupported()) - ); - - @Override - public String getWriteableName() { - return NAME; - } - } - - public void testRegisterRestApiCompatibleQuery() { - SearchPlugin registerCompatQuery = new SearchPlugin() { - @Override - public List> getQueries() { - return singletonList( - new QuerySpec<>( - CompatQueryBuilder.NAME_OLD, - (streamInput) -> new CompatQueryBuilder(), - CompatQueryBuilder::fromXContent - ) - ); - } - }; - - final SearchModule searchModule = new SearchModule(Settings.EMPTY, singletonList(registerCompatQuery)); - - // all entries can be used for current and previous versions except for compatible entry - assertThat(searchModule.getNamedXContents().stream().filter(e -> - // filter out compatible entry - e.name.match(CompatQueryBuilder.NAME_OLD.getPreferredName(), LoggingDeprecationHandler.INSTANCE) == false) - .filter(e -> RestApiVersion.minimumSupported().matches(e.restApiCompatibility)) - .filter(e -> RestApiVersion.current().matches(e.restApiCompatibility)) - .collect(toSet()), - // -1 because of the registered in the test - hasSize(searchModule.getNamedXContents().size() - REST_COMPATIBLE_QUERIES.length - REST_COMPATIBLE_AGGREGATIONS.length - 1) - ); - - final List compatEntry = searchModule.getNamedXContents() - .stream() - .filter( - e -> e.categoryClass.equals(QueryBuilder.class) - && RestApiVersion.minimumSupported().matches(e.name.getForRestApiVersion()) // v7 compatbile - && RestApiVersion.current().matches(e.name.getForRestApiVersion()) == false - ) // but not v8 compatible - .collect(toList()); - assertThat(compatEntry, hasSize(REST_COMPATIBLE_QUERIES.length + 1));// +1 because of registered in the test - assertTrue(RestApiVersion.minimumSupported().matches(compatEntry.get(0).restApiCompatibility)); - assertFalse(RestApiVersion.current().matches(compatEntry.get(0).restApiCompatibility)); - } - public void testDefaultMaxNestedDepth() { new SearchModule(Settings.EMPTY, emptyList()); assertEquals( diff --git a/server/src/test/java/org/elasticsearch/search/SearchSortValuesTests.java b/server/src/test/java/org/elasticsearch/search/SearchSortValuesTests.java index 6a1874183a30f..79ba65c76ee48 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchSortValuesTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchSortValuesTests.java @@ -72,7 +72,7 @@ protected SearchSortValues doParseInstance(XContentParser parser) throws IOExcep parser.nextToken(); // skip to the elements start array token, fromXContent advances from there if called parser.nextToken(); parser.nextToken(); - SearchSortValues searchSortValues = SearchSortValues.fromXContent(parser); + SearchSortValues searchSortValues = SearchResponseUtils.parseSearchSortValues(parser); parser.nextToken(); assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); assertNull(parser.nextToken()); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MemoryTrackingTDigestArraysTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MemoryTrackingTDigestArraysTests.java new file mode 100644 index 
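The removed `testRegisterRestApiCompatibleQuery` also documents how version-scoped registration worked: the query's `ParseField` was restricted with `forRestApiVersion`, so `SearchModule` registered it for v7-compatible parsing only. The core of that mechanism, reconstructed from the deleted `CompatQueryBuilder` (both the query name and `CompatQueryBuilder` itself are the test's own dummies, not production classes):

```java
import org.elasticsearch.core.RestApiVersion;
import org.elasticsearch.plugins.SearchPlugin;
import org.elasticsearch.xcontent.ParseField;

import java.util.List;

class CompatQueryPlugin implements SearchPlugin {
    // Matches only when parsing under the minimum supported (v7-compatible) REST API version.
    static final ParseField NAME_OLD = new ParseField("compat_name").forRestApiVersion(
        RestApiVersion.equalTo(RestApiVersion.minimumSupported())
    );

    @Override
    public List<QuerySpec<?>> getQueries() {
        // CompatQueryBuilder is the dummy query builder defined by the deleted test.
        return List.of(new QuerySpec<>(NAME_OLD, in -> new CompatQueryBuilder(), CompatQueryBuilder::fromXContent));
    }
}
```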
0000000000000..e57f39becef7c --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MemoryTrackingTDigestArraysTests.java @@ -0,0 +1,360 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.search.aggregations.metrics; + +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.tdigest.arrays.TDigestArrays; +import org.elasticsearch.tdigest.arrays.TDigestByteArray; +import org.elasticsearch.tdigest.arrays.TDigestDoubleArray; +import org.elasticsearch.tdigest.arrays.TDigestIntArray; +import org.elasticsearch.tdigest.arrays.TDigestLongArray; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; + +public class MemoryTrackingTDigestArraysTests extends ESTestCase { + // Int arrays + + public void testIntEmpty() { + try (TDigestIntArray array = intArray(0)) { + assertThat(array.size(), equalTo(0)); + } + } + + public void testIntGetAndSet() { + int initialSize = randomIntBetween(10, 1000); + try (TDigestIntArray array = intArray(initialSize)) { + assertThat(array.size(), equalTo(initialSize)); + + int value = randomInt(); + for (int i = 9; i < initialSize; i++) { + array.set(i, value); + } + + for (int i = 0; i < initialSize; i++) { + if (i < 9) { + assertThat(array.get(i), equalTo(0)); + } else { + assertThat(array.get(i), equalTo(value)); + } + } + } + } + + public void testIntResize() { + int initialSize = randomIntBetween(10, 1000); + try (TDigestIntArray array = intArray(initialSize)) { + assertThat(array.size(), equalTo(initialSize)); + + // Fill with a non-zero value + int value = randomBoolean() ? 
randomIntBetween(Integer.MIN_VALUE, -1) : randomIntBetween(1, Integer.MAX_VALUE); + for (int i = 0; i < initialSize; i++) { + array.set(i, value); + } + + // Resize to a size-1 + array.resize(initialSize - 1); + assertThat(array.size(), equalTo(initialSize - 1)); + + for (int i = 0; i < initialSize - 1; i++) { + assertThat(array.get(i), equalTo(value)); + } + + // Resize to the original size + 1 + array.resize(initialSize + 1); + assertThat(array.size(), equalTo(initialSize + 1)); + + // Ensure all new elements are 0 + for (int i = 0; i < initialSize - 1; i++) { + if (i < initialSize) { + assertThat(array.get(i), equalTo(value)); + } else { + assertThat(array.get(i), equalTo(0)); + } + } + } + } + + public void testIntBulkSet() { + int initialSize = randomIntBetween(10, 1000); + int sourceArraySize = randomIntBetween(0, initialSize); + + try (TDigestIntArray array = intArray(initialSize); TDigestIntArray source = intArray(sourceArraySize)) { + assertThat(array.size(), equalTo(initialSize)); + assertThat(source.size(), equalTo(sourceArraySize)); + + int value = randomInt(); + for (int i = 0; i < sourceArraySize; i++) { + source.set(i, value); + } + + int initialOffset = randomIntBetween(0, initialSize - sourceArraySize); + int sourceOffset = randomIntBetween(0, sourceArraySize - 1); + int elementsToCopy = randomIntBetween(1, sourceArraySize - sourceOffset); + + array.set(initialOffset, source, sourceOffset, elementsToCopy); + + for (int i = 0; i < initialSize; i++) { + if (i < initialOffset || i >= initialOffset + elementsToCopy) { + assertThat(array.get(i), equalTo(0)); + } else { + assertThat(array.get(i), equalTo(value)); + } + } + } + } + + // Long arrays + + public void testLongEmpty() { + try (TDigestIntArray array = intArray(0)) { + assertThat(array.size(), equalTo(0)); + } + } + + public void testLongGetAndSet() { + int initialSize = randomIntBetween(10, 1000); + try (TDigestLongArray array = longArray(initialSize)) { + assertThat(array.size(), equalTo(initialSize)); + + long value = randomLong(); + for (int i = 9; i < initialSize; i++) { + array.set(i, value); + } + + for (int i = 0; i < initialSize; i++) { + if (i < 9) { + assertThat(array.get(i), equalTo(0L)); + } else { + assertThat(array.get(i), equalTo(value)); + } + } + } + } + + public void testLongResize() { + int initialSize = randomIntBetween(10, 1000); + try (TDigestLongArray array = longArray(initialSize)) { + assertThat(array.size(), equalTo(initialSize)); + + // Fill with a non-zero value + long value = randomBoolean() ? 
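For context on this new test file: `MemoryTrackingTDigestArrays` hands out TDigest arrays whose allocations are charged against a circuit breaker, which is why every test wraps the array in try-with-resources. A minimal lifecycle sketch under the same assumptions as the tests; `newLimitedBreaker` is the `ESTestCase` helper this file's `arrays()` method uses, and the resize semantics (shrink keeps the surviving prefix, grow zero-fills the tail) are exactly what the tests above assert:

```java
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.tdigest.arrays.TDigestArrays;
import org.elasticsearch.tdigest.arrays.TDigestIntArray;

// Inside an ESTestCase in the same package as MemoryTrackingTDigestArrays:
public void testTrackedArrayLifecycle() {
    TDigestArrays arrays = new MemoryTrackingTDigestArrays(newLimitedBreaker(ByteSizeValue.ofMb(100)));

    // try-with-resources returns the accounted bytes to the breaker on close.
    try (TDigestIntArray array = arrays.newIntArray(16)) {
        array.set(0, 42);
        array.resize(8);   // shrinking keeps the surviving prefix
        array.resize(32);  // growing zero-fills the new tail
        assertEquals(42, array.get(0));
        assertEquals(0, array.get(31));
    }
}
```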
randomLongBetween(Long.MIN_VALUE, -1) : randomLongBetween(1, Long.MAX_VALUE); + for (int i = 0; i < initialSize; i++) { + array.set(i, value); + } + + // Resize to a size-1 + array.resize(initialSize - 1); + assertThat(array.size(), equalTo(initialSize - 1)); + + for (int i = 0; i < initialSize - 1; i++) { + assertThat(array.get(i), equalTo(value)); + } + + // Resize to the original size + 1 + array.resize(initialSize + 1); + assertThat(array.size(), equalTo(initialSize + 1)); + + // Ensure all new elements are 0 + for (int i = 0; i < initialSize - 1; i++) { + if (i < initialSize) { + assertThat(array.get(i), equalTo(value)); + } else { + assertThat(array.get(i), equalTo(0L)); + } + } + } + } + + // Byte arrays + + public void testByteEmpty() { + try (TDigestByteArray array = byteArray(0)) { + assertThat(array.size(), equalTo(0)); + } + } + + public void testByteGetAndSet() { + int initialSize = randomIntBetween(10, 1000); + try (TDigestByteArray array = byteArray(initialSize)) { + assertThat(array.size(), equalTo(initialSize)); + + byte value = randomByte(); + for (int i = 9; i < initialSize; i++) { + array.set(i, value); + } + + for (int i = 0; i < initialSize; i++) { + if (i < 9) { + assertThat(array.get(i), equalTo((byte) 0)); + } else { + assertThat(array.get(i), equalTo(value)); + } + } + } + } + + public void testByteResize() { + int initialSize = randomIntBetween(10, 1000); + try (TDigestByteArray array = byteArray(initialSize)) { + assertThat(array.size(), equalTo(initialSize)); + + // Fill with a non-zero value + byte value = randomBoolean() ? randomByteBetween(Byte.MIN_VALUE, (byte) -1) : randomByteBetween((byte) 1, Byte.MAX_VALUE); + for (int i = 0; i < initialSize; i++) { + array.set(i, value); + } + + // Resize to a size-1 + array.resize(initialSize - 1); + assertThat(array.size(), equalTo(initialSize - 1)); + + for (int i = 0; i < initialSize - 1; i++) { + assertThat(array.get(i), equalTo(value)); + } + + // Resize to the original size + 1 + array.resize(initialSize + 1); + assertThat(array.size(), equalTo(initialSize + 1)); + + // Ensure all new elements are 0 + for (int i = 0; i < initialSize - 1; i++) { + if (i < initialSize) { + assertThat(array.get(i), equalTo(value)); + } else { + assertThat(array.get(i), equalTo((byte) 0)); + } + } + } + } + + // Double arrays + + public void testDoubleEmpty() { + try (TDigestDoubleArray array = doubleArray(0)) { + assertThat(array.size(), equalTo(0)); + } + } + + public void testDoubleGetAndSet() { + int initialSize = randomIntBetween(10, 1000); + try (TDigestDoubleArray array = doubleArray(initialSize)) { + assertThat(array.size(), equalTo(initialSize)); + + double value = randomDoubleBetween(-Double.MAX_VALUE, Double.MAX_VALUE, true); + for (int i = 9; i < initialSize; i++) { + array.set(i, value); + } + + for (int i = 0; i < initialSize; i++) { + if (i < 9) { + assertThat(array.get(i), equalTo(0.0)); + } else { + assertThat(array.get(i), equalTo(value)); + } + } + } + } + + public void testDoubleAdd() { + int initialSize = randomIntBetween(10, 1000); + try (TDigestDoubleArray array = doubleArray(initialSize)) { + assertThat(array.size(), equalTo(initialSize)); + + int newValueCount = randomIntBetween(1, 100); + if (randomBoolean()) { + array.ensureCapacity(initialSize + newValueCount); + } + double value = randomDoubleBetween(-Double.MAX_VALUE, Double.MAX_VALUE, true); + for (int i = 0; i < newValueCount; i++) { + array.add(value); + } + + for (int i = 0; i < newValueCount; i++) { + if (i < initialSize) { + 
assertThat(array.get(i), equalTo(0.0)); + } else { + assertThat(array.get(i), equalTo(value)); + } + } + } + } + + public void testDoubleBulkSet() { + int initialSize = randomIntBetween(10, 1000); + int sourceArraySize = randomIntBetween(0, initialSize); + + try (TDigestDoubleArray array = doubleArray(initialSize); TDigestDoubleArray source = doubleArray(sourceArraySize)) { + assertThat(array.size(), equalTo(initialSize)); + assertThat(source.size(), equalTo(sourceArraySize)); + + double value = randomDoubleBetween(-Double.MAX_VALUE, Double.MAX_VALUE, true); + for (int i = 0; i < sourceArraySize; i++) { + source.set(i, value); + } + + int initialOffset = randomIntBetween(0, initialSize - sourceArraySize); + int sourceOffset = randomIntBetween(0, sourceArraySize - 1); + int elementsToCopy = randomIntBetween(1, sourceArraySize - sourceOffset); + + array.set(initialOffset, source, sourceOffset, elementsToCopy); + + for (int i = 0; i < initialSize; i++) { + if (i < initialOffset || i >= initialOffset + elementsToCopy) { + assertThat(array.get(i), equalTo(0.0)); + } else { + assertThat(array.get(i), equalTo(value)); + } + } + } + } + + public void testDoubleSort() { + try (TDigestDoubleArray array = doubleArray(0)) { + int elementsToAdd = randomIntBetween(0, 100); + array.ensureCapacity(elementsToAdd); + for (int i = 0; i < elementsToAdd; i++) { + array.add(randomDoubleBetween(-Double.MAX_VALUE, Double.MAX_VALUE, true)); + } + + array.sort(); + + double previous = -Double.MAX_VALUE; + for (int i = 0; i < array.size(); i++) { + double current = array.get(i); + assertThat(current, greaterThanOrEqualTo(previous)); + previous = current; + } + } + } + + // Helpers + + private TDigestIntArray intArray(int initialSize) { + return arrays().newIntArray(initialSize); + } + + private TDigestLongArray longArray(int initialSize) { + return arrays().newLongArray(initialSize); + } + + private TDigestByteArray byteArray(int initialSize) { + return arrays().newByteArray(initialSize); + } + + private TDigestDoubleArray doubleArray(int initialSize) { + return arrays().newDoubleArray(initialSize); + } + + private TDigestArrays arrays() { + return new MemoryTrackingTDigestArrays(newLimitedBreaker(ByteSizeValue.ofMb(100))); + } +} diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestStateTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestStateTests.java index e7799a133b5af..56d3d674b28ca 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestStateTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestStateTests.java @@ -16,8 +16,9 @@ import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.Releasables; import org.elasticsearch.tdigest.arrays.TDigestArrays; -import org.elasticsearch.tdigest.arrays.WrapperTDigestArrays; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; @@ -33,140 +34,150 @@ public class TDigestStateTests extends ESTestCase { public void testMoreThan4BValues() { // Regression test for #19528 // See https://github.com/tdunning/t-digest/pull/70/files#diff-4487072cee29b939694825647928f742R439 - TDigestState digest = TDigestState.create(arrays(), 100); - for (int i = 0; i < 1000; ++i) { - digest.add(randomDouble()); - } - final int 
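The bulk-set tests above pin down the copy semantics of `set(offset, source, sourceOffset, count)`: exactly `count` elements are copied from the source window, and every untouched destination slot keeps its previous (zero) value. A worked instance, assuming the same `arrays()` helper:

```java
try (TDigestIntArray dest = arrays().newIntArray(10); TDigestIntArray source = arrays().newIntArray(4)) {
    for (int i = 0; i < source.size(); i++) {
        source.set(i, 7);
    }
    // Copy source[1..3] (three elements) into dest starting at index 2.
    dest.set(2, source, 1, 3);
    assertEquals(0, dest.get(1)); // before the copied window: untouched
    assertEquals(7, dest.get(2)); // dest[2..4] now hold the copied value
    assertEquals(7, dest.get(4));
    assertEquals(0, dest.get(5)); // after the window: untouched
}
```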
count = 1 << 29; - for (int i = 0; i < 10; ++i) { - digest.add(randomDouble(), count); - } - assertEquals(1000 + 10L * (1 << 29), digest.size()); - assertTrue(digest.size() > 2L * Integer.MAX_VALUE); - final double[] quantiles = new double[] { 0, 0.1, 0.5, 0.9, 1, randomDouble() }; - Arrays.sort(quantiles); - double prev = Double.NEGATIVE_INFINITY; - for (double q : quantiles) { - final double v = digest.quantile(q); - logger.trace("q=" + q + ", v=" + v); - assertThat(v, Matchers.either(Matchers.closeTo(prev, 0.0000001D)).or(Matchers.greaterThan(prev))); - assertTrue("Unexpectedly low value: " + v, v >= 0.0); - assertTrue("Unexpectedly high value: " + v, v <= 1.0); - prev = v; + try (TDigestState digest = TDigestState.create(arrays(), 100)) { + for (int i = 0; i < 1000; ++i) { + digest.add(randomDouble()); + } + final int count = 1 << 29; + for (int i = 0; i < 10; ++i) { + digest.add(randomDouble(), count); + } + assertEquals(1000 + 10L * (1 << 29), digest.size()); + assertTrue(digest.size() > 2L * Integer.MAX_VALUE); + final double[] quantiles = new double[] { 0, 0.1, 0.5, 0.9, 1, randomDouble() }; + Arrays.sort(quantiles); + double prev = Double.NEGATIVE_INFINITY; + for (double q : quantiles) { + final double v = digest.quantile(q); + logger.trace("q=" + q + ", v=" + v); + assertThat(v, Matchers.either(Matchers.closeTo(prev, 0.0000001D)).or(Matchers.greaterThan(prev))); + assertTrue("Unexpectedly low value: " + v, v >= 0.0); + assertTrue("Unexpectedly high value: " + v, v <= 1.0); + prev = v; + } } } public void testEqualsHashCode() { - final TDigestState empty1 = new EmptyTDigestState(); - final TDigestState empty2 = new EmptyTDigestState(); - final TDigestState a = TDigestState.create(arrays(), 200); - final TDigestState b = TDigestState.create(arrays(), 100); - final TDigestState c = TDigestState.create(arrays(), 100); + try ( + TDigestState empty1 = new EmptyTDigestState(); + TDigestState empty2 = new EmptyTDigestState(); + TDigestState a = TDigestState.create(arrays(), 200); + TDigestState b = TDigestState.create(arrays(), 100); + TDigestState c = TDigestState.create(arrays(), 100); + ) { - assertEquals(empty1, empty2); - assertEquals(empty1.hashCode(), empty2.hashCode()); + assertEquals(empty1, empty2); + assertEquals(empty1.hashCode(), empty2.hashCode()); - assertNotEquals(a, b); - assertNotEquals(a.hashCode(), b.hashCode()); + assertNotEquals(a, b); + assertNotEquals(a.hashCode(), b.hashCode()); - assertNotEquals(a, c); - assertNotEquals(a.hashCode(), c.hashCode()); + assertNotEquals(a, c); + assertNotEquals(a.hashCode(), c.hashCode()); - assertEquals(b, c); - assertEquals(b.hashCode(), c.hashCode()); + assertEquals(b, c); + assertEquals(b.hashCode(), c.hashCode()); - for (int i = 0; i < 100; i++) { - double value = randomDouble(); - a.add(value); - b.add(value); - c.add(value); - } + for (int i = 0; i < 100; i++) { + double value = randomDouble(); + a.add(value); + b.add(value); + c.add(value); + } - assertNotEquals(a, b); - assertNotEquals(a.hashCode(), b.hashCode()); + assertNotEquals(a, b); + assertNotEquals(a.hashCode(), b.hashCode()); - assertNotEquals(a, c); - assertNotEquals(a.hashCode(), c.hashCode()); + assertNotEquals(a, c); + assertNotEquals(a.hashCode(), c.hashCode()); - assertEquals(b, c); - assertEquals(b.hashCode(), c.hashCode()); + assertEquals(b, c); + assertEquals(b.hashCode(), c.hashCode()); - b.add(randomDouble()); - c.add(randomDouble()); + b.add(randomDouble()); + c.add(randomDouble()); - assertNotEquals(b, c); - assertNotEquals(b.hashCode(), c.hashCode()); 
+ assertNotEquals(b, c); + assertNotEquals(b.hashCode(), c.hashCode()); + } } public void testHash() { final HashMap map = new HashMap<>(); final Set set = new HashSet<>(); - final TDigestState empty1 = new EmptyTDigestState(); - final TDigestState empty2 = new EmptyTDigestState(); - final TDigestState a = TDigestState.create(arrays(), 200); - final TDigestState b = TDigestState.create(arrays(), 100); - final TDigestState c = TDigestState.create(arrays(), 100); - - a.add(randomDouble()); - b.add(randomDouble()); - c.add(randomDouble()); - expectThrows(UnsupportedOperationException.class, () -> empty1.add(randomDouble())); - expectThrows(UnsupportedOperationException.class, () -> empty2.add(a)); - - map.put("empty1", empty1); - map.put("empty2", empty2); - map.put("a", a); - map.put("b", b); - map.put("c", c); - set.add(empty1); - set.add(empty2); - set.add(a); - set.add(b); - set.add(c); - - assertEquals(5, map.size()); - assertEquals(4, set.size()); - - assertEquals(empty1, map.get("empty1")); - assertEquals(empty2, map.get("empty2")); - assertEquals(a, map.get("a")); - assertEquals(b, map.get("b")); - assertEquals(c, map.get("c")); - - assertTrue(set.stream().anyMatch(digest -> digest.equals(a))); - assertTrue(set.stream().anyMatch(digest -> digest.equals(b))); - assertTrue(set.stream().anyMatch(digest -> digest.equals(c))); - assertTrue(set.stream().anyMatch(digest -> digest.equals(empty1))); - assertTrue(set.stream().anyMatch(digest -> digest.equals(empty2))); + try ( + TDigestState empty1 = new EmptyTDigestState(); + TDigestState empty2 = new EmptyTDigestState(); + TDigestState a = TDigestState.create(arrays(), 200); + TDigestState b = TDigestState.create(arrays(), 100); + TDigestState c = TDigestState.create(arrays(), 100); + ) { + + a.add(randomDouble()); + b.add(randomDouble()); + c.add(randomDouble()); + expectThrows(UnsupportedOperationException.class, () -> empty1.add(randomDouble())); + expectThrows(UnsupportedOperationException.class, () -> empty2.add(a)); + + map.put("empty1", empty1); + map.put("empty2", empty2); + map.put("a", a); + map.put("b", b); + map.put("c", c); + set.add(empty1); + set.add(empty2); + set.add(a); + set.add(b); + set.add(c); + + assertEquals(5, map.size()); + assertEquals(4, set.size()); + + assertEquals(empty1, map.get("empty1")); + assertEquals(empty2, map.get("empty2")); + assertEquals(a, map.get("a")); + assertEquals(b, map.get("b")); + assertEquals(c, map.get("c")); + + assertTrue(set.stream().anyMatch(digest -> digest.equals(a))); + assertTrue(set.stream().anyMatch(digest -> digest.equals(b))); + assertTrue(set.stream().anyMatch(digest -> digest.equals(c))); + assertTrue(set.stream().anyMatch(digest -> digest.equals(empty1))); + assertTrue(set.stream().anyMatch(digest -> digest.equals(empty2))); + } } public void testFactoryMethods() { - TDigestState fast = TDigestState.create(arrays(), 100); - TDigestState anotherFast = TDigestState.create(arrays(), 100); - TDigestState accurate = TDigestState.createOptimizedForAccuracy(arrays(), 100); - TDigestState anotherAccurate = TDigestState.createUsingParamsFrom(accurate); - - for (int i = 0; i < 100; i++) { - fast.add(i); - anotherFast.add(i); - accurate.add(i); - anotherAccurate.add(i); - } + try ( + TDigestState fast = TDigestState.create(arrays(), 100); + TDigestState anotherFast = TDigestState.create(arrays(), 100); + TDigestState accurate = TDigestState.createOptimizedForAccuracy(arrays(), 100); + TDigestState anotherAccurate = TDigestState.createUsingParamsFrom(accurate); + ) { - for (double p : 
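The mechanical change running through `TDigestStateTests` is that `TDigestState` is now backed by breaker-tracked arrays and must be released, so each test body moves inside try-with-resources. The same discipline in isolation, assuming the test's `arrays()` helper:

```java
import org.elasticsearch.core.Releasables;

// Nested lifetime: try-with-resources closes the digest and frees its tracked arrays.
try (TDigestState digest = TDigestState.create(arrays(), 100)) {
    for (int i = 0; i < 1000; i++) {
        digest.add(randomDouble());
    }
    assertEquals(1000, digest.size());
    double median = digest.quantile(0.5);
    assertTrue(median >= 0.0 && median <= 1.0);
}

// Non-nested lifetimes (as in testSerialization below): close everything in one call.
TDigestState a = TDigestState.create(arrays(), 100);
TDigestState b = TDigestState.create(arrays(), 200);
Releasables.close(a, b);
```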
new double[] { 0.1, 1, 10, 25, 50, 75, 90, 99, 99.9 }) { - double q = p / 100; - assertEquals(fast.quantile(q), accurate.quantile(q), 0.5); - assertEquals(fast.quantile(q), anotherFast.quantile(q), 1e-5); - assertEquals(accurate.quantile(q), anotherAccurate.quantile(q), 1e-5); + for (int i = 0; i < 100; i++) { + fast.add(i); + anotherFast.add(i); + accurate.add(i); + anotherAccurate.add(i); + } + + for (double p : new double[] { 0.1, 1, 10, 25, 50, 75, 90, 99, 99.9 }) { + double q = p / 100; + assertEquals(fast.quantile(q), accurate.quantile(q), 0.5); + assertEquals(fast.quantile(q), anotherFast.quantile(q), 1e-5); + assertEquals(accurate.quantile(q), anotherAccurate.quantile(q), 1e-5); + } + + assertEquals(fast, anotherFast); + assertEquals(accurate, anotherAccurate); + assertNotEquals(fast, accurate); + assertNotEquals(anotherFast, anotherAccurate); } - - assertEquals(fast, anotherFast); - assertEquals(accurate, anotherAccurate); - assertNotEquals(fast, accurate); - assertNotEquals(anotherFast, anotherAccurate); } - private static TDigestState writeToAndReadFrom(TDigestState state, TransportVersion version) throws IOException { + private TDigestState writeToAndReadFrom(TDigestState state, TransportVersion version) throws IOException { BytesRef serializedAggs = serialize(state, version); try ( StreamInput in = new NamedWriteableAwareStreamInput( @@ -203,9 +214,11 @@ public void testSerialization() throws IOException { TDigestState serializedBackwardsCompatible = writeToAndReadFrom(state, TransportVersions.V_8_8_1); assertNotEquals(serializedBackwardsCompatible, state); assertEquals(serializedBackwardsCompatible, backwardsCompatible); + + Releasables.close(state, backwardsCompatible, serialized, serializedBackwardsCompatible); } - private static TDigestArrays arrays() { - return WrapperTDigestArrays.INSTANCE; + private TDigestArrays arrays() { + return new MemoryTrackingTDigestArrays(newLimitedBreaker(ByteSizeValue.ofMb(100))); } } diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightFieldTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightFieldTests.java index 6b2259d8fbedc..a609a13a87833 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightFieldTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightFieldTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.text.Text; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -56,7 +57,7 @@ public void testFromXContent() throws IOException { try (XContentParser parser = createParser(builder)) { parser.nextToken(); // skip to the opening object token, fromXContent advances from here and starts with the field name parser.nextToken(); - HighlightField parsedField = HighlightField.fromXContent(parser); + HighlightField parsedField = SearchResponseUtils.parseHighlightField(parser); assertEquals(highlightField, parsedField); if (highlightField.fragments() != null) { assertEquals(XContentParser.Token.END_ARRAY, parser.currentToken()); diff --git a/server/src/test/java/org/elasticsearch/search/profile/ProfileResultTests.java b/server/src/test/java/org/elasticsearch/search/profile/ProfileResultTests.java index 
5f052e1c40dab..c4a0b3b588310 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/ProfileResultTests.java +++ b/server/src/test/java/org/elasticsearch/search/profile/ProfileResultTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.common.util.Maps; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.test.AbstractXContentSerializingTestCase; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -72,7 +73,7 @@ protected Reader instanceReader() { @Override protected ProfileResult doParseInstance(XContentParser parser) throws IOException { - return ProfileResult.fromXContent(parser); + return SearchResponseUtils.parseProfileResult(parser); } @Override diff --git a/server/src/test/java/org/elasticsearch/search/profile/aggregation/AggregationProfileShardResultTests.java b/server/src/test/java/org/elasticsearch/search/profile/aggregation/AggregationProfileShardResultTests.java index eba3a89a953e4..d2a4cdf62a16b 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/aggregation/AggregationProfileShardResultTests.java +++ b/server/src/test/java/org/elasticsearch/search/profile/aggregation/AggregationProfileShardResultTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParserUtils; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.search.profile.ProfileResult; import org.elasticsearch.search.profile.ProfileResultTests; import org.elasticsearch.test.AbstractXContentSerializingTestCase; @@ -55,7 +56,7 @@ protected AggregationProfileShardResult doParseInstance(XContentParser parser) t XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); XContentParserUtils.ensureFieldName(parser, parser.nextToken(), AggregationProfileShardResult.AGGREGATIONS); XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.nextToken(), parser); - AggregationProfileShardResult result = AggregationProfileShardResult.fromXContent(parser); + AggregationProfileShardResult result = SearchResponseUtils.readAggregationProfileShardResult(parser); XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_ARRAY, parser.currentToken(), parser); XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser); return result; diff --git a/server/src/test/java/org/elasticsearch/search/profile/query/CollectorResultTests.java b/server/src/test/java/org/elasticsearch/search/profile/query/CollectorResultTests.java index 2ca4744bfd7bd..21c32388e8707 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/query/CollectorResultTests.java +++ b/server/src/test/java/org/elasticsearch/search/profile/query/CollectorResultTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.test.AbstractXContentSerializingTestCase; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -56,7 +57,7 @@ protected CollectorResult mutateInstance(CollectorResult instance) { @Override protected CollectorResult doParseInstance(XContentParser parser) throws IOException { 
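A recurring refactor in these parser tests (`SearchSortValuesTests`, `HighlightFieldTests`, `ProfileResultTests`, and the surrounding profile-result hunks): the response classes no longer carry their own `fromXContent`, and the test-framework class `SearchResponseUtils` now owns the parsing. The override pattern each round-trip test adopts:

```java
import org.elasticsearch.search.SearchResponseUtils;
import org.elasticsearch.search.profile.ProfileResult;
import org.elasticsearch.xcontent.XContentParser;

import java.io.IOException;

@Override
protected ProfileResult doParseInstance(XContentParser parser) throws IOException {
    // Parsing for round-trip tests lives in the test framework now, not on the production class.
    return SearchResponseUtils.parseProfileResult(parser);
}
```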
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - CollectorResult result = CollectorResult.fromXContent(parser); + CollectorResult result = SearchResponseUtils.parseCollectorResult(parser); ensureExpectedToken(null, parser.nextToken(), parser); return result; } diff --git a/server/src/test/java/org/elasticsearch/tasks/BanFailureLoggingTests.java b/server/src/test/java/org/elasticsearch/tasks/BanFailureLoggingTests.java index 276099e01c9f8..78d76476d06fc 100644 --- a/server/src/test/java/org/elasticsearch/tasks/BanFailureLoggingTests.java +++ b/server/src/test/java/org/elasticsearch/tasks/BanFailureLoggingTests.java @@ -184,8 +184,8 @@ public Task createTask(long id, String type, String action, TaskId parentTaskId, // acceptable; we mostly ignore the result of cancellation anyway } - // assert busy since failure to remove a ban may be logged after cancellation completed - assertBusy(mockLog::assertAllExpectationsMatched); + // await since failure to remove a ban may be logged after cancellation completed + mockLog.awaitAllExpectationsMatched(); } assertTrue("child tasks did not finish in time", childTaskLock.tryLock(15, TimeUnit.SECONDS)); diff --git a/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolTests.java b/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolTests.java index 458eeb900071d..310cf467a8391 100644 --- a/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolTests.java +++ b/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolTests.java @@ -128,7 +128,7 @@ public void testTimerThreadWarningLogging() throws Exception { final ThreadPool.CachedTimeThread thread = new ThreadPool.CachedTimeThread("[timer]", 200, 100); thread.start(); - assertBusy(mockLog::assertAllExpectationsMatched); + mockLog.awaitAllExpectationsMatched(); thread.interrupt(); thread.join(); @@ -297,7 +297,7 @@ public String toString() { } }; threadPool.schedule(runnable, TimeValue.timeValueMillis(randomLongBetween(0, 300)), EsExecutors.DIRECT_EXECUTOR_SERVICE); - assertBusy(mockLog::assertAllExpectationsMatched); + mockLog.awaitAllExpectationsMatched(); } finally { assertTrue(terminate(threadPool)); } diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterAwareTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterAwareTests.java index 169f6d8060020..2394e0b07cc57 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterAwareTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterAwareTests.java @@ -130,7 +130,7 @@ public void testGroupClusterIndicesFail() { RemoteClusterAwareTest remoteClusterAware = new RemoteClusterAwareTest(); Set remoteClusterNames = Set.of("cluster1", "cluster2", "some-cluster3"); - mustThrowException(new String[] { ":foo" }, NoSuchRemoteClusterException.class, "no such remote cluster"); + mustThrowException(new String[] { ":foo" }, IllegalArgumentException.class, "is invalid because the remote part is empty"); mustThrowException(new String[] { "notacluster:foo" }, NoSuchRemoteClusterException.class, "no such remote cluster"); // Cluster wildcard exclusion requires :* mustThrowException( diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterClientTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterClientTests.java index 9ea75f060a30d..ff0742c89bba9 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterClientTests.java +++ 
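The two `MockLog` hunks above replace `assertBusy(mockLog::assertAllExpectationsMatched)`, which polls and re-asserts, with `mockLog.awaitAllExpectationsMatched()`, which blocks until every expectation has been seen or the wait times out. A usage sketch: only `awaitAllExpectationsMatched` is taken from the hunks; the capture and expectation calls follow the shape used elsewhere in this suite and should be read as assumptions:

```java
try (var mockLog = MockLog.capture(ThreadPool.class)) {
    mockLog.addExpectation(
        // The logger name, level, and message pattern here are illustrative.
        new MockLog.SeenEventExpectation("timer warning", ThreadPool.class.getCanonicalName(), Level.WARN, "*timer thread slept*")
    );

    // ... trigger the code path that should emit the warning ...

    // Blocks until the expectation matches, instead of repeatedly re-running an assertion.
    mockLog.awaitAllExpectationsMatched();
}
```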
b/server/src/test/java/org/elasticsearch/transport/RemoteClusterClientTests.java @@ -8,7 +8,6 @@ */ package org.elasticsearch.transport; -import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.TransportVersion; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; @@ -17,7 +16,7 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchScrollRequest; import org.elasticsearch.action.search.TransportSearchScrollAction; -import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeRole; @@ -103,26 +102,34 @@ public void testConnectAndExecuteRequest() throws Exception { randomFrom(RemoteClusterService.DisconnectedStrategy.values()) ); ClusterStateResponse clusterStateResponse = safeAwait( - listener -> client.execute( - ClusterStateAction.REMOTE_TYPE, - new ClusterStateRequest(TEST_REQUEST_TIMEOUT), + listener -> ActionListener.run( ActionListener.runBefore( listener, () -> assertTrue(Thread.currentThread().getName().contains('[' + TEST_THREAD_POOL_NAME + ']')) - ) + ), + clusterStateResponseListener -> { + final var request = new ClusterStateRequest(TEST_REQUEST_TIMEOUT); + if (randomBoolean()) { + client.execute(ClusterStateAction.REMOTE_TYPE, request, clusterStateResponseListener); + } else { + SubscribableListener.newForked( + l -> client.getConnection(randomFrom(request, null), l) + ) + .andThen( + (l, connection) -> client.execute(connection, ClusterStateAction.REMOTE_TYPE, request, l) + ) + .addListener(clusterStateResponseListener); + } + } ) ); assertNotNull(clusterStateResponse); assertEquals("foo_bar_cluster", clusterStateResponse.getState().getClusterName().value()); // also test a failure, there is no handler for scroll registered - ActionNotFoundTransportException ex = asInstanceOf( + ActionNotFoundTransportException ex = safeAwaitAndUnwrapFailure( ActionNotFoundTransportException.class, - ExceptionsHelper.unwrapCause( - safeAwaitFailure( - SearchResponse.class, - listener -> client.execute(TransportSearchScrollAction.REMOTE_TYPE, new SearchScrollRequest(""), listener) - ) - ) + SearchResponse.class, + listener -> client.execute(TransportSearchScrollAction.REMOTE_TYPE, new SearchScrollRequest(""), listener) ); assertEquals("No handler for action [indices:data/read/scroll]", ex.getMessage()); } @@ -169,12 +176,13 @@ public void testEnsureWeReconnect() throws Exception { for (int i = 0; i < 10; i++) { RemoteClusterConnection remoteClusterConnection = remoteClusterService.getRemoteClusterConnection("test"); assertBusy(remoteClusterConnection::assertNoRunningConnections); - ConnectionManager connectionManager = remoteClusterConnection.getConnectionManager(); - Transport.Connection connection = connectionManager.getConnection(remoteNode); - PlainActionFuture closeFuture = new PlainActionFuture<>(); - connection.addCloseListener(closeFuture); - connectionManager.disconnectFromNode(remoteNode); - closeFuture.get(); + + safeAwait(connectionClosedListener -> { + ConnectionManager connectionManager = remoteClusterConnection.getConnectionManager(); + Transport.Connection connection = connectionManager.getConnection(remoteNode); + connection.addCloseListener(connectionClosedListener.map(v -> v)); + connectionManager.disconnectFromNode(remoteNode); + }); var 
client = remoteClusterService.getRemoteClusterClient( "test", @@ -184,11 +192,21 @@ public void testEnsureWeReconnect() throws Exception { RemoteClusterService.DisconnectedStrategy.RECONNECT_UNLESS_SKIP_UNAVAILABLE ) ); - ClusterStateResponse clusterStateResponse = safeAwait( - listener -> client.execute(ClusterStateAction.REMOTE_TYPE, new ClusterStateRequest(TEST_REQUEST_TIMEOUT), listener) - ); - assertNotNull(clusterStateResponse); - assertEquals("foo_bar_cluster", clusterStateResponse.getState().getClusterName().value()); + + if (randomBoolean()) { + final ClusterStateResponse clusterStateResponse = safeAwait( + listener -> client.execute( + ClusterStateAction.REMOTE_TYPE, + new ClusterStateRequest(TEST_REQUEST_TIMEOUT), + listener + ) + ); + assertNotNull(clusterStateResponse); + assertEquals("foo_bar_cluster", clusterStateResponse.getState().getClusterName().value()); + } else { + final Transport.Connection connection = safeAwait(listener -> client.getConnection(null, listener)); + assertFalse(connection.isClosed()); + } assertTrue(remoteClusterConnection.isNodeConnected(remoteNode)); } } @@ -271,28 +289,42 @@ public void testQuicklySkipUnavailableClusters() throws Exception { assertFalse(remoteClusterService.isRemoteNodeConnected("test", remoteNode)); // check that we quickly fail - ESTestCase.assertThat( - safeAwaitFailure( - ClusterStateResponse.class, - listener -> client.execute( - ClusterStateAction.REMOTE_TYPE, - new ClusterStateRequest(TEST_REQUEST_TIMEOUT), - listener - ) - ), - instanceOf(ConnectTransportException.class) - ); + if (randomBoolean()) { + ESTestCase.assertThat( + safeAwaitFailure( + ClusterStateResponse.class, + listener -> client.execute( + ClusterStateAction.REMOTE_TYPE, + new ClusterStateRequest(TEST_REQUEST_TIMEOUT), + listener + ) + ), + instanceOf(ConnectTransportException.class) + ); + } else { + ESTestCase.assertThat( + safeAwaitFailure(Transport.Connection.class, listener -> client.getConnection(null, listener)), + instanceOf(ConnectTransportException.class) + ); + } } finally { service.clearAllRules(); latch.countDown(); } - assertBusy(() -> { - ClusterStateResponse ignored = safeAwait( - listener -> client.execute(ClusterStateAction.REMOTE_TYPE, new ClusterStateRequest(TEST_REQUEST_TIMEOUT), listener) - ); + assertBusy( // keep retrying on an exception, the goal is to check that we eventually reconnect - }); + randomFrom( + () -> safeAwait( + listener -> client.execute( + ClusterStateAction.REMOTE_TYPE, + new ClusterStateRequest(TEST_REQUEST_TIMEOUT), + listener.map(v -> v) + ) + ), + () -> safeAwait(listener -> client.getConnection(null, listener.map(v -> v))) + ) + ); assertTrue(remoteClusterService.isRemoteNodeConnected("test", remoteNode)); } } diff --git a/server/src/test/java/org/elasticsearch/transport/TransportServiceHandshakeTests.java b/server/src/test/java/org/elasticsearch/transport/TransportServiceHandshakeTests.java index 28be70533597c..c686329c4154c 100644 --- a/server/src/test/java/org/elasticsearch/transport/TransportServiceHandshakeTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TransportServiceHandshakeTests.java @@ -186,9 +186,10 @@ public void testMismatchedClusterName() { ) ) { assertThat( - asInstanceOf( + safeAwaitFailure( IllegalStateException.class, - safeAwaitFailure(DiscoveryNode.class, listener -> transportServiceA.handshake(connection, timeout, listener)) + DiscoveryNode.class, + listener -> transportServiceA.handshake(connection, timeout, listener) ).getMessage(), containsString( "handshake with [" + 
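The new `getConnection`-then-`execute` path exercised above is worth reading as a pattern: `SubscribableListener.newForked` starts the first async step, `andThen` runs the dependent step once it succeeds, and `addListener` delivers the final result, or the first failure, to the caller. Condensed from the test:

```java
SubscribableListener
    // Step 1: asynchronously obtain a connection to the remote cluster (the request may be null).
    .<Transport.Connection>newForked(l -> client.getConnection(request, l))
    // Step 2: once connected, execute the action over that specific connection.
    .<ClusterStateResponse>andThen((l, connection) -> client.execute(connection, ClusterStateAction.REMOTE_TYPE, request, l))
    // Failures from either step and the final response all land on the caller's listener.
    .addListener(clusterStateResponseListener);
```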
discoveryNode + "] failed: remote cluster name [b] does not match local cluster name [a]" @@ -231,9 +232,10 @@ public void testIncompatibleNodeVersions() { ) ) { assertThat( - asInstanceOf( + safeAwaitFailure( IllegalStateException.class, - safeAwaitFailure(DiscoveryNode.class, listener -> transportServiceA.handshake(connection, timeout, listener)) + DiscoveryNode.class, + listener -> transportServiceA.handshake(connection, timeout, listener) ).getMessage(), containsString( "handshake with [" @@ -303,12 +305,10 @@ public void testNodeConnectWithDifferentNodeId() { .version(transportServiceB.getLocalNode().getVersionInformation()) .build(); assertThat( - asInstanceOf( + safeAwaitFailure( ConnectTransportException.class, - safeAwaitFailure( - Releasable.class, - listener -> transportServiceA.connectToNode(discoveryNode, TestProfiles.LIGHT_PROFILE, listener) - ) + Releasable.class, + listener -> transportServiceA.connectToNode(discoveryNode, TestProfiles.LIGHT_PROFILE, listener) ).getMessage(), allOf( containsString("Connecting to [" + discoveryNode.getAddress() + "] failed"), @@ -360,9 +360,10 @@ public void testRejectsMismatchedBuildHash() { ) { assertThat( ExceptionsHelper.unwrap( - asInstanceOf( + safeAwaitFailure( TransportSerializationException.class, - safeAwaitFailure(DiscoveryNode.class, listener -> transportServiceA.handshake(connection, timeout, listener)) + DiscoveryNode.class, + listener -> transportServiceA.handshake(connection, timeout, listener) ), IllegalArgumentException.class ).getMessage(), diff --git a/test/framework/src/main/java/org/elasticsearch/client/internal/RedirectToLocalClusterRemoteClusterClient.java b/test/framework/src/main/java/org/elasticsearch/client/internal/RedirectToLocalClusterRemoteClusterClient.java index cb2b2a78c1bd6..ff3910b2debfb 100644 --- a/test/framework/src/main/java/org/elasticsearch/client/internal/RedirectToLocalClusterRemoteClusterClient.java +++ b/test/framework/src/main/java/org/elasticsearch/client/internal/RedirectToLocalClusterRemoteClusterClient.java @@ -11,10 +11,10 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.RemoteClusterActionType; import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportResponse; /** @@ -22,10 +22,10 @@ */ public class RedirectToLocalClusterRemoteClusterClient implements RemoteClusterClient { - private final ElasticsearchClient delegate; + private final ElasticsearchClient localNodeClient; - public RedirectToLocalClusterRemoteClusterClient(ElasticsearchClient delegate) { - this.delegate = delegate; + public RedirectToLocalClusterRemoteClusterClient(ElasticsearchClient localNodeClient) { + this.localNodeClient = localNodeClient; } @SuppressWarnings("unchecked") @@ -35,6 +35,21 @@ public void Request request, ActionListener listener ) { - delegate.execute(new ActionType(action.name()), request, listener.map(r -> (Response) r)); + localNodeClient.execute(new ActionType<>(action.name()), request, listener.map(r -> (Response) r)); + } + + @Override + public void execute( + Transport.Connection connection, + RemoteClusterActionType action, + Request request, + ActionListener listener + ) { + throw new AssertionError("not implemented on RedirectToLocalClusterRemoteClusterClient"); + } + + @Override + public void getConnection(Request request, 
ActionListener listener) { + throw new AssertionError("not implemented on RedirectToLocalClusterRemoteClusterClient"); } } diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java index 7a9e9ed5c2a4e..90c621c62c305 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java @@ -44,6 +44,8 @@ import java.util.regex.Pattern; import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; +import static org.elasticsearch.test.NeverMatcher.never; +import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.equalTo; @@ -280,9 +282,12 @@ public void testReadBlobWithReadTimeouts() { assertThat(exception, readTimeoutExceptionMatcher()); assertThat( exception.getMessage().toLowerCase(Locale.ROOT), - either(containsString("read timed out")).or(containsString("premature end of chunk coded message body: closing chunk expected")) - .or(containsString("Read timed out")) - .or(containsString("unexpected end of file from server")) + anyOf( + containsString("read timed out"), + containsString("premature end of chunk coded message body: closing chunk expected"), + containsString("Read timed out"), + containsString("unexpected end of file from server") + ) ); assertThat(exception.getSuppressed().length, getMaxRetriesMatcher(maxRetries)); } @@ -323,10 +328,15 @@ public void testReadBlobWithPrematureConnectionClose() { final int maxRetries = randomInt(20); final BlobContainer blobContainer = createBlobContainer(maxRetries, null, null, null); + final boolean alwaysFlushBody = randomBoolean(); + // HTTP server sends a partial response final byte[] bytes = randomBlobContent(1); httpServer.createContext(downloadStorageEndpoint(blobContainer, "read_blob_incomplete"), exchange -> { sendIncompleteContent(exchange, bytes); + if (alwaysFlushBody) { + exchange.getResponseBody().flush(); + } exchange.close(); }); @@ -341,9 +351,14 @@ public void testReadBlobWithPrematureConnectionClose() { }); assertThat( exception.getMessage().toLowerCase(Locale.ROOT), - either(containsString("premature end of chunk coded message body: closing chunk expected")).or( - containsString("premature end of content-length delimited message body") - ).or(containsString("connection closed prematurely")) + anyOf( + // closing the connection after sending the headers and some incomplete body might yield one of these: + containsString("premature end of chunk coded message body: closing chunk expected"), + containsString("premature end of content-length delimited message body"), + containsString("connection closed prematurely"), + // if we didn't call exchange.getResponseBody().flush() then we might not even have sent the response headers: + alwaysFlushBody ? 
never() : containsString("the target server failed to respond") + ) ); assertThat(exception.getSuppressed().length, getMaxRetriesMatcher(Math.min(10, maxRetries))); } diff --git a/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java b/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java index 86bd0899e862a..cc4aac686a02d 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java @@ -17,6 +17,7 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.client.Response; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -26,12 +27,14 @@ import org.elasticsearch.common.xcontent.XContentParserUtils; import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.Index; import org.elasticsearch.index.mapper.IgnoredFieldMapper; import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.action.RestActions; import org.elasticsearch.search.aggregations.Aggregation; +import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.fetch.subphase.highlight.HighlightField; import org.elasticsearch.search.profile.ProfileResult; @@ -62,8 +65,10 @@ import java.util.Locale; import java.util.Map; +import static java.util.stream.Collectors.toMap; import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureFieldName; +import static org.elasticsearch.common.xcontent.XContentParserUtils.parseTypedKeysObject; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; @@ -230,7 +235,7 @@ public static SearchResponse parseInnerSearchResponse(XContentParser parser) thr if (SearchHits.Fields.HITS.equals(currentFieldName)) { hits = parseSearchHits(parser); } else if (InternalAggregations.AGGREGATIONS_FIELD.equals(currentFieldName)) { - aggs = InternalAggregations.fromXContent(parser); + aggs = parseInternalAggregations(parser); } else if (Suggest.NAME.equals(currentFieldName)) { suggest = parseSuggest(parser); } else if (SearchProfileResults.PROFILE_FIELD.equals(currentFieldName)) { @@ -254,7 +259,7 @@ public static SearchResponse parseInnerSearchResponse(XContentParser parser) thr } else if (token == XContentParser.Token.START_ARRAY) { if (RestActions.FAILURES_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { while (parser.nextToken() != XContentParser.Token.END_ARRAY) { - failures.add(ShardSearchFailure.fromXContent(parser)); + failures.add(parseShardSearchFailure(parser)); } } else { parser.skipChildren(); @@ -407,7 +412,7 @@ private static SearchResponse.Cluster parseCluster(String clusterAlias, XContent } else if (token == XContentParser.Token.START_ARRAY) { if (RestActions.FAILURES_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { while (parser.nextToken() != 
XContentParser.Token.END_ARRAY) { - failures.add(ShardSearchFailure.fromXContent(parser)); + failures.add(parseShardSearchFailure(parser)); } } else { parser.skipChildren(); @@ -483,7 +488,7 @@ private static void parseProfileResultsEntry(XContentParser parser, Map ProfileResult.fromXContent(p), SearchProfileDfsPhaseResult.STATISTICS); + parser.declareObject(optionalConstructorArg(), (p, c) -> parseProfileResult(p), SearchProfileDfsPhaseResult.STATISTICS); parser.declareObjectArray(optionalConstructorArg(), (p, c) -> parseQueryProfileShardResult(p), SearchProfileDfsPhaseResult.KNN); PROFILE_DFS_PHASE_RESULT_PARSER = parser.build(); } @@ -546,11 +551,11 @@ public static QueryProfileShardResult parseQueryProfileShardResult(XContentParse } else if (token == XContentParser.Token.START_ARRAY) { if (QueryProfileShardResult.QUERY_ARRAY.equals(currentFieldName)) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - queryProfileResults.add(ProfileResult.fromXContent(parser)); + queryProfileResults.add(parseProfileResult(parser)); } } else if (QueryProfileShardResult.COLLECTOR.equals(currentFieldName)) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - collector = CollectorResult.fromXContent(parser); + collector = parseCollectorResult(parser); } } else { parser.skipChildren(); @@ -704,7 +709,7 @@ public static void declareInnerHitsParseFields(ObjectParser, parser.declareField( (map, list) -> map.put(SearchHit.Fields.SORT, list), - SearchSortValues::fromXContent, + SearchResponseUtils::parseSearchSortValues, new ParseField(SearchHit.Fields.SORT), ObjectParser.ValueType.OBJECT_ARRAY ); @@ -753,7 +758,7 @@ private static Map parseInnerHits(XContentParser parser) thr private static Map parseHighlightFields(XContentParser parser) throws IOException { Map highlightFields = new HashMap<>(); while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) { - HighlightField highlightField = HighlightField.fromXContent(parser); + HighlightField highlightField = parseHighlightField(parser); highlightFields.put(highlightField.name(), highlightField); } return highlightFields; @@ -851,11 +856,9 @@ public static SearchHit searchHitFromMap(Map values) { String index = get(SearchHit.Fields._INDEX, values, null); String clusterAlias = null; if (index != null) { - int indexOf = index.indexOf(RemoteClusterAware.REMOTE_CLUSTER_INDEX_SEPARATOR); - if (indexOf > 0) { - clusterAlias = index.substring(0, indexOf); - index = index.substring(indexOf + 1); - } + String[] split = RemoteClusterAware.splitIndexName(index); + clusterAlias = split[0]; + index = split[1]; } ShardId shardId = get(SearchHit.Fields._SHARD, values, null); String nodeId = get(SearchHit.Fields._NODE, values, null); @@ -898,4 +901,168 @@ private static T get(String key, Map map, T defaultValue) { return (T) map.getOrDefault(key, defaultValue); } + public static AggregationProfileShardResult readAggregationProfileShardResult(XContentParser parser) throws IOException { + XContentParser.Token token = parser.currentToken(); + ensureExpectedToken(XContentParser.Token.START_ARRAY, token, parser); + List aggProfileResults = new ArrayList<>(); + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + aggProfileResults.add(parseProfileResult(parser)); + } + return new AggregationProfileShardResult(aggProfileResults); + } + + public static CollectorResult parseCollectorResult(XContentParser parser) throws IOException { + XContentParser.Token token = parser.currentToken(); + 
ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser); + String currentFieldName = null; + String name = null, reason = null; + long time = -1; + List children = new ArrayList<>(); + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token.isValue()) { + if (CollectorResult.NAME.match(currentFieldName, parser.getDeprecationHandler())) { + name = parser.text(); + } else if (CollectorResult.REASON.match(currentFieldName, parser.getDeprecationHandler())) { + reason = parser.text(); + } else if (CollectorResult.TIME.match(currentFieldName, parser.getDeprecationHandler())) { + // we need to consume this value, but we use the raw nanosecond value + parser.text(); + } else if (CollectorResult.TIME_NANOS.match(currentFieldName, parser.getDeprecationHandler())) { + time = parser.longValue(); + } else { + parser.skipChildren(); + } + } else if (token == XContentParser.Token.START_ARRAY) { + if (CollectorResult.CHILDREN.match(currentFieldName, parser.getDeprecationHandler())) { + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + children.add(parseCollectorResult(parser)); + } + } else { + parser.skipChildren(); + } + } else { + parser.skipChildren(); + } + } + return new CollectorResult(name, reason, time, children); + } + + public static HighlightField parseHighlightField(XContentParser parser) throws IOException { + ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser); + String fieldName = parser.currentName(); + Text[] fragments; + XContentParser.Token token = parser.nextToken(); + if (token == XContentParser.Token.START_ARRAY) { + List values = new ArrayList<>(); + while (parser.nextToken() != XContentParser.Token.END_ARRAY) { + values.add(new Text(parser.text())); + } + fragments = values.toArray(Text.EMPTY_ARRAY); + } else if (token == XContentParser.Token.VALUE_NULL) { + fragments = null; + } else { + throw new ParsingException(parser.getTokenLocation(), "unexpected token type [" + token + "]"); + } + return new HighlightField(fieldName, fragments); + } + + private static InternalAggregations parseInternalAggregations(XContentParser parser) throws IOException { + final List aggregations = new ArrayList<>(); + XContentParser.Token token; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.START_OBJECT) { + SetOnce typedAgg = new SetOnce<>(); + String currentField = parser.currentName(); + parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, InternalAggregation.class, typedAgg::set); + if (typedAgg.get() != null) { + aggregations.add(typedAgg.get()); + } else { + throw new ParsingException( + parser.getTokenLocation(), + String.format(Locale.ROOT, "Could not parse aggregation keyed as [%s]", currentField) + ); + } + } + } + return new InternalAggregations(aggregations); + } + + private static final InstantiatingObjectParser PROFILE_RESULT_PARSER; + static { + InstantiatingObjectParser.Builder parser = InstantiatingObjectParser.builder( + "profile_result", + true, + ProfileResult.class + ); + parser.declareString(constructorArg(), ProfileResult.TYPE); + parser.declareString(constructorArg(), ProfileResult.DESCRIPTION); + parser.declareObject( + constructorArg(), + (p, c) -> p.map().entrySet().stream().collect(toMap(Map.Entry::getKey, e -> ((Number) e.getValue()).longValue())), + ProfileResult.BREAKDOWN + ); + 
parser.declareObject(optionalConstructorArg(), (p, c) -> p.map(), ProfileResult.DEBUG); + parser.declareLong(constructorArg(), ProfileResult.NODE_TIME_RAW); + parser.declareObjectArray(optionalConstructorArg(), (p, c) -> parseProfileResult(p), ProfileResult.CHILDREN); + PROFILE_RESULT_PARSER = parser.build(); + } + + public static ProfileResult parseProfileResult(XContentParser p) throws IOException { + return PROFILE_RESULT_PARSER.parse(p, null); + } + + public static SearchSortValues parseSearchSortValues(XContentParser parser) throws IOException { + ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser); + return new SearchSortValues(parser.list().toArray()); + } + + public static ShardSearchFailure parseShardSearchFailure(XContentParser parser) throws IOException { + XContentParser.Token token; + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); + String currentFieldName = null; + int shardId = -1; + String indexName = null; + String clusterAlias = null; + String nodeId = null; + ElasticsearchException exception = null; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token.isValue()) { + if (ShardSearchFailure.SHARD_FIELD.equals(currentFieldName)) { + shardId = parser.intValue(); + } else if (ShardSearchFailure.INDEX_FIELD.equals(currentFieldName)) { + indexName = parser.text(); + int indexOf = indexName.indexOf(RemoteClusterAware.REMOTE_CLUSTER_INDEX_SEPARATOR); + if (indexOf > 0) { + clusterAlias = indexName.substring(0, indexOf); + indexName = indexName.substring(indexOf + 1); + } + } else if (ShardSearchFailure.NODE_FIELD.equals(currentFieldName)) { + nodeId = parser.text(); + } else { + parser.skipChildren(); + } + } else if (token == XContentParser.Token.START_OBJECT) { + if (ShardSearchFailure.REASON_FIELD.equals(currentFieldName)) { + exception = ElasticsearchException.fromXContent(parser); + } else { + parser.skipChildren(); + } + } else { + parser.skipChildren(); + } + } + SearchShardTarget searchShardTarget = null; + if (nodeId != null) { + searchShardTarget = new SearchShardTarget( + nodeId, + new ShardId(new Index(indexName, IndexMetadata.INDEX_UUID_NA_VALUE), shardId), + clusterAlias + ); + } + return new ShardSearchFailure(exception, searchShardTarget); + } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ClusterServiceUtils.java b/test/framework/src/main/java/org/elasticsearch/test/ClusterServiceUtils.java index 7fb480952356e..8c6058b47cf0c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ClusterServiceUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ClusterServiceUtils.java @@ -243,7 +243,7 @@ public static void awaitNoPendingTasks(ClusterService clusterService) { ESTestCase.safeAwait( listener -> clusterService.submitUnbatchedStateUpdateTask( "await-queue-empty", - new ClusterStateUpdateTask(Priority.LANGUID, TimeValue.timeValueSeconds(10)) { + new ClusterStateUpdateTask(Priority.LANGUID, ESTestCase.SAFE_AWAIT_TIMEOUT) { @Override public ClusterState execute(ClusterState currentState) { return currentState; @@ -287,7 +287,7 @@ public String toString() { if (predicate.test(clusterService.state())) { listener.onResponse(null); } else { - listener.addTimeout(TimeValue.timeValueSeconds(10), clusterService.threadPool(), EsExecutors.DIRECT_EXECUTOR_SERVICE); + listener.addTimeout(ESTestCase.SAFE_AWAIT_TIMEOUT, 
clusterService.threadPool(), EsExecutors.DIRECT_EXECUTOR_SERVICE); } return listener; } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 00cfedb257187..7021ea47aa8dd 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -39,6 +39,7 @@ import org.apache.lucene.tests.util.TestUtil; import org.apache.lucene.tests.util.TimeUnits; import org.apache.lucene.util.SetOnce; +import org.elasticsearch.ElasticsearchWrapperException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.TransportVersion; import org.elasticsearch.action.ActionFuture; @@ -57,6 +58,7 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.CompositeBytesReference; @@ -214,7 +216,6 @@ import static org.hamcrest.Matchers.emptyCollectionOf; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItem; -import static org.hamcrest.Matchers.in; import static org.hamcrest.Matchers.startsWith; /** @@ -577,6 +578,21 @@ public final void before() { } } + private final List<CircuitBreaker> breakers = Collections.synchronizedList(new ArrayList<>()); + + protected final CircuitBreaker newLimitedBreaker(ByteSizeValue max) { + CircuitBreaker breaker = new MockBigArrays.LimitedBreaker("", max); + breakers.add(breaker); + return breaker; + } + + @After + public final void allBreakersMemoryReleased() { + for (CircuitBreaker breaker : breakers) { + assertThat(breaker.getUsed(), equalTo(0L)); + } + } + /** * Whether or not we check after each test whether it has left warnings behind. That happens if any deprecated feature or syntax * was used by the test and the test didn't assert on it using {@link #assertWarnings(String...)}. @@ -1642,6 +1658,15 @@ public String randomCompatibleMediaType(RestApiVersion version) { } public String compatibleMediaType(XContentType type, RestApiVersion version) { + if (type.canonical().equals(type)) { + throw new IllegalArgumentException( + "Compatible header is only supported for vendor content types." + + " You requested " + + type.name() + + " but likely want VND_" + + type.name() + ); + } return type.toParsedMediaType() .responseContentTypeHeader(Map.of(MediaType.COMPATIBLE_WITH_PARAMETER_NAME, String.valueOf(version.major))); } @@ -2405,6 +2430,44 @@ public static Exception safeAwaitFailure(@SuppressWarnings("unused") Class + + public static <ExpectedException extends Exception, Response> ExpectedException safeAwaitFailure( + Class<ExpectedException> exceptionType, + Class<Response> responseType, + Consumer<ActionListener<Response>> consumer + ) { + return asInstanceOf(exceptionType, safeAwaitFailure(responseType, consumer)); + } + + /** + * Wait for the exceptional completion of the given async action, with a timeout of {@link #SAFE_AWAIT_TIMEOUT}, + * preserving the thread's interrupt status flag and converting a successful completion, interrupt or timeout into an {@link + * AssertionError} to trigger a test failure. Any layers of {@link ElasticsearchWrapperException} are removed from the thrown exception + * using {@link ExceptionsHelper#unwrapCause}. + * + * @param responseType Class of listener response type, to aid type inference but otherwise ignored. 
+ * @param exceptionType Expected unwrapped exception type. This method throws an {@link AssertionError} if a different type of exception + * is seen. + * + * @return The unwrapped exception with which the {@code listener} was completed exceptionally. + */ + public static <ExpectedException extends Exception, Response> ExpectedException safeAwaitAndUnwrapFailure( + Class<ExpectedException> exceptionType, + Class<Response> responseType, + Consumer<ActionListener<Response>> consumer + ) { + return asInstanceOf(exceptionType, ExceptionsHelper.unwrapCause(safeAwaitFailure(responseType, consumer))); + } + /** * Send the current thread to sleep for the given duration, asserting that the sleep is not interrupted but preserving the thread's * interrupt status flag in any case. diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTokenStreamTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTokenStreamTestCase.java index a1ce19f820433..7b6ea4e2cd256 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTokenStreamTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTokenStreamTestCase.java @@ -12,10 +12,8 @@ import com.carrotsearch.randomizedtesting.annotations.Listeners; import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; -import org.apache.lucene.tests.analysis.BaseTokenStreamTestCase; import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.tests.util.TimeUnits; -import org.elasticsearch.bootstrap.BootstrapForTesting; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexVersion; @@ -29,19 +27,8 @@ * Basic test case for token streams. the assertion methods in this class will * run basic checks to enforce correct behavior of the token streams. */ -public abstract class ESTokenStreamTestCase extends BaseTokenStreamTestCase { - - static { - try { - Class.forName("org.elasticsearch.test.ESTestCase"); - } catch (ClassNotFoundException e) { - throw new AssertionError(e); - } - BootstrapForTesting.ensureInitialized(); - } - +public abstract class ESTokenStreamTestCase extends ESTestCase { public Settings.Builder newAnalysisSettingsBuilder() { return Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()); } - } diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index ff66d59a21c5b..7a04384298933 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -1536,7 +1536,9 @@ private void randomlyResetClients() { // only reset the clients on nightly tests, it causes heavy load... 
if (RandomizedTest.isNightly() && rarely(random)) { final Collection nodesAndClients = nodes.values(); + logger.info("Resetting [{}] node clients on internal test cluster", nodesAndClients.size()); for (NodeAndClient nodeAndClient : nodesAndClients) { + logger.info("Resetting [{}] node client on internal test cluster", nodeAndClient.name); nodeAndClient.resetClient(); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/MockLog.java b/test/framework/src/main/java/org/elasticsearch/test/MockLog.java index 57acca08c23e8..4a012bb361e65 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/MockLog.java +++ b/test/framework/src/main/java/org/elasticsearch/test/MockLog.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.TimeValue; import java.util.Arrays; import java.util.List; @@ -23,10 +24,13 @@ import java.util.Objects; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; import java.util.regex.Pattern; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; /** @@ -112,10 +116,46 @@ public void assertAllExpectationsMatched() { } } + public void awaitAllExpectationsMatched() { + awaitAllExpectationsMatched(ESTestCase.SAFE_AWAIT_TIMEOUT); + } + + // exposed for testing + void awaitAllExpectationsMatched(TimeValue waitTime) { + final var deadlineNanos = System.nanoTime() + waitTime.nanos(); + final var nanosPerMilli = TimeValue.timeValueMillis(1).nanos(); + try { + for (LoggingExpectation expectation : expectations) { + final var remainingMillis = (deadlineNanos - System.nanoTime() + nanosPerMilli - 1) / nanosPerMilli; // round up + assertThat(remainingMillis, greaterThan(0L)); + expectation.awaitMatched(remainingMillis); + } + } catch (InterruptedException interruptedException) { + Thread.currentThread().interrupt(); + throw new AssertionError("interrupted", interruptedException); + } + } + + /** + * Keeps track of whether the {@link LogEvent} instances it receives match the expected content. + */ public interface LoggingExpectation { + /** + * Called on every {@link LogEvent} received by the captured appenders. + */ void match(LogEvent event); + /** + * Returns if this expectation is matched, otherwise throws an {@link AssertionError}. + */ void assertMatched(); + + /** + * Returns if this expectation is matched within the given number of milliseconds, otherwise throws an {@link AssertionError}. 
+ */ + default void awaitMatched(long millis) throws InterruptedException { + assertMatched(); + } } public abstract static class AbstractEventExpectation implements LoggingExpectation { @@ -123,14 +163,13 @@ public abstract static class AbstractEventExpectation implements LoggingExpectat protected final String logger; protected final Level level; protected final String message; - volatile boolean saw; + protected final CountDownLatch seenLatch = new CountDownLatch(1); public AbstractEventExpectation(String name, String logger, Level level, String message) { this.name = name; this.logger = logger; this.level = level; this.message = message; - this.saw = false; } @Override @@ -138,11 +177,11 @@ public void match(LogEvent event) { if (event.getLevel().equals(level) && event.getLoggerName().equals(logger) && innerMatch(event)) { if (Regex.isSimpleMatchPattern(message)) { if (Regex.simpleMatch(message, event.getMessage().getFormattedMessage())) { - saw = true; + seenLatch.countDown(); } } else { if (event.getMessage().getFormattedMessage().contains(message)) { - saw = true; + seenLatch.countDown(); } } } @@ -162,7 +201,7 @@ public UnseenEventExpectation(String name, String logger, Level level, String me @Override public void assertMatched() { - assertThat("expected not to see " + name + " but did", saw, equalTo(false)); + assertThat("expected not to see " + name + " but did", seenLatch.getCount(), equalTo(1L)); } } @@ -174,7 +213,12 @@ public SeenEventExpectation(String name, String logger, Level level, String mess @Override public void assertMatched() { - assertThat("expected to see " + name + " but did not", saw, equalTo(true)); + assertThat("expected to see " + name + " but did not", seenLatch.getCount(), equalTo(0L)); + } + + @Override + public void awaitMatched(long millis) throws InterruptedException { + assertThat("expected to see " + name + " but did not", seenLatch.await(millis, TimeUnit.MILLISECONDS), equalTo(true)); } } @@ -195,7 +239,17 @@ public void assertMatched() { if (expectSeen) { super.assertMatched(); } else { - assertThat("expected not to see " + name + " yet but did", saw, equalTo(false)); + assertThat("expected not to see " + name + " yet but did", seenLatch.getCount(), equalTo(1L)); + } + } + + @Override + public void awaitMatched(long millis) throws InterruptedException { + if (expectSeen) { + super.awaitMatched(millis); + } else { + // do not wait for negative expectation + assertThat("expected not to see " + name + " yet but did", seenLatch.getCount(), equalTo(1L)); } } } @@ -229,11 +283,11 @@ public boolean innerMatch(final LogEvent event) { public static class PatternSeenEventExpectation implements LoggingExpectation { - protected final String name; - protected final String logger; - protected final Level level; - protected final Pattern pattern; - volatile boolean saw; + private final String name; + private final String logger; + private final Level level; + private final Pattern pattern; + private final CountDownLatch seenLatch = new CountDownLatch(1); public PatternSeenEventExpectation(String name, String logger, Level level, String pattern) { this.name = name; @@ -246,16 +300,20 @@ public PatternSeenEventExpectation(String name, String logger, Level level, Stri public void match(LogEvent event) { if (event.getLevel().equals(level) && event.getLoggerName().equals(logger)) { if (pattern.matcher(event.getMessage().getFormattedMessage()).matches()) { - saw = true; + seenLatch.countDown(); } } } @Override public void assertMatched() { - assertThat(name, saw, 
equalTo(true)); + assertThat(name, seenLatch.getCount(), equalTo(0L)); } + @Override + public void awaitMatched(long millis) throws InterruptedException { + assertThat(name, seenLatch.await(millis, TimeUnit.MILLISECONDS), equalTo(true)); + } } /** @@ -284,6 +342,15 @@ public void assertMatched() { } } + @Override + public void awaitMatched(long millis) throws InterruptedException { + try { + delegate.awaitMatched(millis); + } finally { + assertMatchedCalled = true; + } + } + @Override public String toString() { return delegate.toString(); @@ -336,4 +403,17 @@ public static void assertThatLogger(Runnable action, Class loggerOwner, MockL mockLog.assertAllExpectationsMatched(); } } + + /** + * Executes an action and waits until the given logging expectations are satisfied. + */ + public static void awaitLogger(Runnable action, Class loggerOwner, MockLog.LoggingExpectation... expectations) { + try (var mockLog = MockLog.capture(loggerOwner)) { + for (var expectation : expectations) { + mockLog.addExpectation(expectation); + } + action.run(); + mockLog.awaitAllExpectationsMatched(); + } + } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/NeverMatcher.java b/test/framework/src/main/java/org/elasticsearch/test/NeverMatcher.java new file mode 100644 index 0000000000000..aad771a30d82e --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/test/NeverMatcher.java @@ -0,0 +1,35 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.test; + +import org.hamcrest.BaseMatcher; +import org.hamcrest.Description; +import org.hamcrest.Matcher; + +public class NeverMatcher<T> extends BaseMatcher<T> { + @SuppressWarnings("unchecked") + public static <T> Matcher<T> never() { + return (Matcher<T>) INSTANCE; + } + + private static final Matcher<?> INSTANCE = new NeverMatcher<>(); + + private NeverMatcher() {/* singleton */} + + @Override + public boolean matches(Object actual) { + return false; + } + + @Override + public void describeTo(Description description) { + description.appendText("never matches"); + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 6ed0a1dfe0229..c8542011bcfd8 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -1179,7 +1179,14 @@ protected static void wipeDataStreams() throws IOException { // We hit a version of ES that doesn't serialize DeleteDataStreamAction.Request#wildcardExpressionsOriginallySpecified field // or that doesn't support data streams so it's safe to ignore int statusCode = ee.getResponse().getStatusLine().getStatusCode(); - if (statusCode < 404 || statusCode > 405) { + if (statusCode == 400) { + // the test cluster likely does not include the data streams module so we can ignore this error code + // additionally there is an implementation gotcha that causes the response code to be 400 or 405 depending on whether + // "_data_stream/*" matches a registered index pattern such as {a}/{b} but not for the HTTP verb. + // Prior to v9 POST {index}/{type} was registered as a compatible index pattern so the request would partially match + // and return a 405, but without that pattern registered at all the return value is a 400. 
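A minimal standalone sketch of the tolerance rule described in the comment above, assuming the usual low-level REST client helpers (adminClient(), org.elasticsearch.client.Request and ResponseException) and a wildcard delete endpoint like the one used by wipeDataStreams; illustrative only, not the committed implementation:

    try {
        adminClient().performRequest(new Request("DELETE", "_data_stream/*?expand_wildcards=all"));
    } catch (ResponseException e) {
        int status = e.getResponse().getStatusLine().getStatusCode();
        // 400: no data stream support registered at all; 404/405: older versions where the
        // endpoint is missing or only the verb fails to match. Anything else is a real failure.
        if (status != 400 && (status < 404 || status > 405)) {
            throw e;
        }
    }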
+ return; + } else if (statusCode < 404 || statusCode > 405) { throw ee; } } diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index ba7aa9977b917..840ccd611c52f 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -1373,7 +1373,7 @@ public void handleException(TransportException exp) {} serviceA.sendRequest(nodeB, "internal:test", new StringMessageRequest("", 10), noopResponseHandler); - assertBusy(mockLog::assertAllExpectationsMatched); + mockLog.awaitAllExpectationsMatched(); //////////////////////////////////////////////////////////////////////// // tests for included action type "internal:testError" which returns an error @@ -1420,7 +1420,7 @@ public void handleException(TransportException exp) {} serviceA.sendRequest(nodeB, "internal:testError", new StringMessageRequest(""), noopResponseHandler); - assertBusy(mockLog::assertAllExpectationsMatched); + mockLog.awaitAllExpectationsMatched(); //////////////////////////////////////////////////////////////////////// // tests for excluded action type "internal:testNotSeen" @@ -1467,7 +1467,7 @@ public void handleException(TransportException exp) {} submitRequest(serviceA, nodeB, "internal:testNotSeen", new StringMessageRequest(""), noopResponseHandler).get(); - assertBusy(mockLog::assertAllExpectationsMatched); + mockLog.awaitAllExpectationsMatched(); } } @@ -3507,9 +3507,10 @@ public static ConnectTransportException connectToNodeExpectFailure( DiscoveryNode node, ConnectionProfile connectionProfile ) { - return asInstanceOf( + return safeAwaitFailure( ConnectTransportException.class, - safeAwaitFailure(Releasable.class, listener -> service.connectToNode(node, connectionProfile, listener)) + Releasable.class, + listener -> service.connectToNode(node, connectionProfile, listener) ); } @@ -3532,9 +3533,10 @@ public static ConnectTransportException openConnectionExpectFailure( DiscoveryNode node, ConnectionProfile connectionProfile ) { - return asInstanceOf( + return safeAwaitFailure( ConnectTransportException.class, - safeAwaitFailure(Transport.Connection.class, listener -> service.openConnection(node, connectionProfile, listener)) + Transport.Connection.class, + listener -> service.openConnection(node, connectionProfile, listener) ); } diff --git a/test/framework/src/test/java/org/elasticsearch/test/MockLogTests.java b/test/framework/src/test/java/org/elasticsearch/test/MockLogTests.java index 2720da55544d1..89d23c3b345b9 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/MockLogTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/MockLogTests.java @@ -9,8 +9,11 @@ package org.elasticsearch.test; +import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.test.junit.annotations.TestLogging; import java.util.concurrent.atomic.AtomicBoolean; @@ -27,7 +30,7 @@ public void testConcurrentLogAndLifecycle() throws Exception { logThread.start(); for (int i = 0; i < 1000; i++) { - try (var mockLog = MockLog.capture(MockLogTests.class)) { + try (var ignored = MockLog.capture(MockLogTests.class)) { Thread.yield(); } } @@ -35,4 +38,62 @@ public void testConcurrentLogAndLifecycle() throws 
Exception { keepGoing.set(false); logThread.join(); } + + @TestLogging(reason = "checking log behaviour", value = "org.elasticsearch.test.MockLogTests:INFO") + public void testAwaitUnseenEvent() { + try (var mockLog = MockLog.capture(MockLogTests.class)) { + mockLog.addExpectation( + new MockLog.UnseenEventExpectation("unseen", MockLogTests.class.getCanonicalName(), Level.INFO, "unexpected") + ); + Thread.currentThread().interrupt(); // ensures no blocking calls + mockLog.awaitAllExpectationsMatched(); + mockLog.assertAllExpectationsMatched(); + + logger.info("unexpected"); + expectThrows(AssertionError.class, mockLog::awaitAllExpectationsMatched); + expectThrows(AssertionError.class, mockLog::assertAllExpectationsMatched); + + assertTrue(Thread.interrupted()); // clear interrupt flag again + } + } + + @TestLogging(reason = "checking log behaviour", value = "org.elasticsearch.test.MockLogTests:INFO") + public void testAwaitSeenEvent() throws InterruptedException { + try (var mockLog = MockLog.capture(MockLogTests.class)) { + mockLog.addExpectation(new MockLog.SeenEventExpectation("seen", MockLogTests.class.getCanonicalName(), Level.INFO, "expected")); + + expectThrows(AssertionError.class, () -> mockLog.awaitAllExpectationsMatched(TimeValue.timeValueMillis(10))); + expectThrows(AssertionError.class, mockLog::assertAllExpectationsMatched); + + final var logThread = new Thread(() -> { + logger.info("expected"); + mockLog.assertAllExpectationsMatched(); + }); + logThread.start(); + mockLog.awaitAllExpectationsMatched(); + mockLog.assertAllExpectationsMatched(); + logThread.join(); + } + } + + @TestLogging(reason = "checking log behaviour", value = "org.elasticsearch.test.MockLogTests:INFO") + public void testAwaitPatternEvent() throws InterruptedException { + try (var mockLog = MockLog.capture(MockLogTests.class)) { + mockLog.addExpectation( + new MockLog.PatternSeenEventExpectation("seen", MockLogTests.class.getCanonicalName(), Level.INFO, ".*expected.*") + ); + + expectThrows(AssertionError.class, () -> mockLog.awaitAllExpectationsMatched(TimeValue.timeValueMillis(10))); + expectThrows(AssertionError.class, mockLog::assertAllExpectationsMatched); + + final var logThread = new Thread(() -> { + logger.info("blah blah expected blah blah"); + mockLog.assertAllExpectationsMatched(); + }); + logThread.start(); + mockLog.awaitAllExpectationsMatched(); + mockLog.assertAllExpectationsMatched(); + logThread.join(); + } + } } diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java index cb98f9de31ff5..7df791bf11559 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java @@ -18,7 +18,8 @@ public enum FeatureFlag { TIME_SERIES_MODE("es.index_mode_feature_flag_registered=true", Version.fromString("8.0.0"), null), FAILURE_STORE_ENABLED("es.failure_store_feature_flag_enabled=true", Version.fromString("8.12.0"), null), - CHUNKING_SETTINGS_ENABLED("es.inference_chunking_settings_feature_flag_enabled=true", Version.fromString("8.16.0"), null); + CHUNKING_SETTINGS_ENABLED("es.inference_chunking_settings_feature_flag_enabled=true", Version.fromString("8.16.0"), null), + INFERENCE_SCALE_TO_ZERO("es.inference_scale_to_zero_feature_flag_enabled=true", Version.fromString("8.16.0"), null); public final String systemProperty; public final Version from; diff --git 
a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterSpecBuilder.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterSpecBuilder.java index 9bc58dd64404e..1d7cc76be165b 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterSpecBuilder.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterSpecBuilder.java @@ -19,7 +19,9 @@ public final class DefaultLocalClusterSpecBuilder extends AbstractLocalClusterSp public DefaultLocalClusterSpecBuilder() { super(); - this.apply(c -> c.systemProperty("ingest.geoip.downloader.enabled.default", "false")); + this.apply( + c -> c.systemProperty("ingest.geoip.downloader.enabled.default", "false").systemProperty("tests.testfeatures.enabled", "true") + ); this.apply(new FipsEnabledClusterConfigProvider()); this.settings(new DefaultSettingsProvider()); this.environment(new DefaultEnvironmentProvider()); diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java index 2f8cccdc303e6..4c612d5e04886 100644 --- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java +++ b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java @@ -823,7 +823,7 @@ private SingleForecast forecast(Metadata metadata, DataStream stream, long forec stream = stream.unsafeRollover( new Index(rolledDataStreamInfo.v1(), uuid), rolledDataStreamInfo.v2(), - false, + null, stream.getAutoShardingEvent() ); diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle index ebc79ca6ce44a..eb0796672a174 100644 --- a/x-pack/plugin/build.gradle +++ b/x-pack/plugin/build.gradle @@ -82,5 +82,6 @@ tasks.named("precommit").configure { tasks.named("yamlRestCompatTestTransform").configure({ task -> task.skipTest("security/10_forbidden/Test bulk response with invalid credentials", "warning does not exist for compatibility") + task.skipTest("wildcard/30_ignore_above_synthetic_source/wildcard field type ignore_above", "Temporary until backported") }) diff --git a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/FollowerFailOverIT.java b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/FollowerFailOverIT.java index 501a664d64698..274d723a37574 100644 --- a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/FollowerFailOverIT.java +++ b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/FollowerFailOverIT.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.delete.DeleteResponse; +import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -157,6 +158,7 @@ public void testFollowIndexAndCloseNode() throws Exception { followRequest.getParameters().setMaxWriteRequestOperationCount(randomIntBetween(32, 2048)); followRequest.getParameters().setMaxWriteRequestSize(new ByteSizeValue(randomIntBetween(1, 4096), ByteSizeUnit.KB)); followRequest.getParameters().setMaxOutstandingWriteRequests(randomIntBetween(1, 10)); + 
followRequest.waitForActiveShards(ActiveShardCount.ALL); followerClient().execute(PutFollowAction.INSTANCE, followRequest).get(); disableDelayedAllocation("index2"); logger.info("--> follow request {}", Strings.toString(followRequest)); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java index b4607e002f27e..2c633a43264f6 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java @@ -43,6 +43,7 @@ import org.elasticsearch.license.RemoteClusterLicenseChecker; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.transport.RemoteClusterService; +import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportResponse; import org.elasticsearch.xpack.ccr.action.CcrRequests; import org.elasticsearch.xpack.ccr.action.ShardChangesAction; @@ -424,6 +425,7 @@ public static RemoteClusterClient wrapRemoteClusterClient( return new RemoteClusterClient() { @Override public void execute( + Transport.Connection connection, RemoteClusterActionType action, Request request, ActionListener listener @@ -434,9 +436,14 @@ public void null, request, listener, - (r, l) -> client.execute(action, r, l) + (r, l) -> client.execute(connection, action, r, l) ); } + + @Override + public void getConnection(Request request, ActionListener listener) { + client.getConnection(request, listener); + } }; } } @@ -466,6 +473,7 @@ private static RemoteClusterClient systemClient(ThreadContext threadContext, Rem return new RemoteClusterClient() { @Override public void execute( + Transport.Connection connection, RemoteClusterActionType action, Request request, ActionListener listener @@ -473,9 +481,14 @@ public void final Supplier supplier = threadContext.newRestorableContext(false); try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { threadContext.markAsSystemContext(); - delegate.execute(action, request, new ContextPreservingActionListener<>(supplier, listener)); + delegate.execute(connection, action, request, new ContextPreservingActionListener<>(supplier, listener)); } } + + @Override + public void getConnection(Request request, ActionListener listener) { + delegate.getConnection(request, listener); + } }; } diff --git a/x-pack/plugin/core/src/main/java/module-info.java b/x-pack/plugin/core/src/main/java/module-info.java index 72436bb9d5171..47848310fe781 100644 --- a/x-pack/plugin/core/src/main/java/module-info.java +++ b/x-pack/plugin/core/src/main/java/module-info.java @@ -228,6 +228,7 @@ exports org.elasticsearch.xpack.core.watcher.trigger; exports org.elasticsearch.xpack.core.watcher.watch; exports org.elasticsearch.xpack.core.watcher; + exports org.elasticsearch.xpack.cluster.settings; provides org.elasticsearch.action.admin.cluster.node.info.ComponentVersionNumber with diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RemoteClusterLicenseChecker.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RemoteClusterLicenseChecker.java index 8db05703a3f0d..01280b1d95f80 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RemoteClusterLicenseChecker.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RemoteClusterLicenseChecker.java @@ -239,7 +239,7 @@ private void remoteClusterLicense(final String clusterAlias, final ActionListene * @return true 
if the collection of indices contains a remote index, otherwise false */ public static boolean isRemoteIndex(final String index) { - return index.indexOf(RemoteClusterAware.REMOTE_CLUSTER_INDEX_SEPARATOR) != -1; + return RemoteClusterAware.isRemoteIndexName(index); } /** @@ -275,7 +275,7 @@ public static List remoteIndices(final Collection indices) { public static List remoteClusterAliases(final Set remoteClusters, final List indices) { return indices.stream() .filter(RemoteClusterLicenseChecker::isRemoteIndex) - .map(index -> index.substring(0, index.indexOf(RemoteClusterAware.REMOTE_CLUSTER_INDEX_SEPARATOR))) + .map(index -> RemoteClusterAware.splitIndexName(index)[0]) .distinct() .flatMap(clusterExpression -> ClusterNameExpressionResolver.resolveClusterNames(remoteClusters, clusterExpression).stream()) .distinct() diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/settings/ClusterSettings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/settings/ClusterSettings.java new file mode 100644 index 0000000000000..1127889783f16 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/settings/ClusterSettings.java @@ -0,0 +1,19 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.cluster.settings; + +import org.elasticsearch.common.settings.Setting; + +public class ClusterSettings { + public static final Setting CLUSTER_LOGSDB_ENABLED = Setting.boolSetting( + "cluster.logsdb.enabled", + false, + Setting.Property.Dynamic, + Setting.Property.NodeScope + ); +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelDeploymentAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelDeploymentAction.java index 7fca223b2ee7e..cb578fdb157de 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelDeploymentAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelDeploymentAction.java @@ -161,7 +161,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws public ActionRequestValidationException validate() { ActionRequestValidationException validationException = new ActionRequestValidationException(); if (numberOfAllocations != null) { - if (numberOfAllocations < 1) { + if (numberOfAllocations < 0 || (isInternal == false && numberOfAllocations == 0)) { validationException.addValidationError("[" + NUMBER_OF_ALLOCATIONS + "] must be a positive integer"); } if (isInternal == false diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/SystemPrivilege.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/SystemPrivilege.java index 013d7cc21a54a..9233841891461 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/SystemPrivilege.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/SystemPrivilege.java @@ -39,7 +39,7 @@ public final class SystemPrivilege extends Privilege { RetentionLeaseActions.REMOVE.name() + "*", // needed for CCR to remove retention leases RetentionLeaseActions.RENEW.name() + "*", // 
needed for CCR to renew retention leases "indices:admin/settings/update", // needed for DiskThresholdMonitor.markIndicesReadOnly - CompletionPersistentTaskAction.NAME, // needed for ShardFollowTaskCleaner + CompletionPersistentTaskAction.INSTANCE.name(), // needed for ShardFollowTaskCleaner "indices:data/write/*", // needed for SystemIndexMigrator "indices:data/read/*", // needed for SystemIndexMigrator "indices:admin/refresh", // needed for SystemIndexMigrator diff --git a/x-pack/plugin/core/template-resources/src/main/resources/logs@settings-logsdb.json b/x-pack/plugin/core/template-resources/src/main/resources/logs@settings-logsdb.json new file mode 100644 index 0000000000000..eabdd6fb9fad2 --- /dev/null +++ b/x-pack/plugin/core/template-resources/src/main/resources/logs@settings-logsdb.json @@ -0,0 +1,26 @@ +{ + "template": { + "settings": { + "index": { + "lifecycle": { + "name": "logs" + }, + "mode": "logsdb", + "codec": "best_compression", + "mapping": { + "ignore_malformed": true, + "total_fields": { + "ignore_dynamic_beyond_limit": true + } + }, + "default_pipeline": "logs@default-pipeline" + } + } + }, + "_meta": { + "description": "default settings for the logs index template installed by x-pack", + "managed": true + }, + "version": ${xpack.stack.template.version}, + "deprecated": ${xpack.stack.template.deprecated} +} diff --git a/x-pack/plugin/core/template-resources/src/main/resources/logs@settings.json b/x-pack/plugin/core/template-resources/src/main/resources/logs@settings.json index e9a9f2611ad7b..ca2659b8d8dea 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/logs@settings.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/logs@settings.json @@ -5,7 +5,6 @@ "lifecycle": { "name": "logs" }, - "mode": "${xpack.stack.template.logsdb.index.mode}", "codec": "best_compression", "mapping": { "ignore_malformed": true, diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/StringUtils.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/StringUtils.java index cd0ade2054ce6..1bfd94730c4fc 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/StringUtils.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/StringUtils.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.core.Tuple; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; @@ -27,7 +28,6 @@ import java.util.StringJoiner; import static java.util.stream.Collectors.toList; -import static org.elasticsearch.transport.RemoteClusterAware.REMOTE_CLUSTER_INDEX_SEPARATOR; import static org.elasticsearch.transport.RemoteClusterAware.buildRemoteIndexName; import static org.elasticsearch.xpack.esql.core.util.NumericUtils.isUnsignedLong; @@ -378,10 +378,8 @@ public static String ordinal(int i) { } public static Tuple splitQualifiedIndex(String indexName) { - int separatorOffset = indexName.indexOf(REMOTE_CLUSTER_INDEX_SEPARATOR); - return separatorOffset > 0 - ? 
Tuple.tuple(indexName.substring(0, separatorOffset), indexName.substring(separatorOffset + 1)) - : Tuple.tuple(null, indexName); + String[] split = RemoteClusterAware.splitIndexName(indexName); + return Tuple.tuple(split[0], split[1]); } public static String qualifyAndJoinIndices(String cluster, String[] indices) { @@ -393,7 +391,7 @@ public static String qualifyAndJoinIndices(String cluster, String[] indices) { } public static boolean isQualified(String indexWildcard) { - return indexWildcard.indexOf(REMOTE_CLUSTER_INDEX_SEPARATOR) > 0; + return RemoteClusterAware.isRemoteIndexName(indexWildcard); } public static boolean isInteger(String value) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MaxFloatAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MaxFloatAggregator.java index eea436541069e..f5d4537c3c370 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MaxFloatAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MaxFloatAggregator.java @@ -16,7 +16,7 @@ class MaxFloatAggregator { public static float init() { - return Float.MIN_VALUE; + return -Float.MAX_VALUE; } public static float combine(float current, float v) { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxFloatGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxFloatGroupingAggregatorFunctionTests.java index 0abcb05a91af6..e4da581a59136 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxFloatGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxFloatGroupingAggregatorFunctionTests.java @@ -27,7 +27,8 @@ public class MaxFloatGroupingAggregatorFunctionTests extends GroupingAggregatorF protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { return new LongFloatTupleBlockSourceOperator( blockFactory, - LongStream.range(0, end).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomFloat())) + LongStream.range(0, end) + .mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomFloatBetween(-Float.MAX_VALUE, Float.MAX_VALUE, true))) ); } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv index 7e857f5243f58..83a2f3cb1c281 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv @@ -1,9 +1,9 @@ -millis:date,nanos:date_nanos -2023-10-23T13:55:01.543Z,2023-10-23T13:55:01.543123456Z -2023-10-23T13:53:55.832Z,2023-10-23T13:53:55.832987654Z -2023-10-23T13:52:55.015Z,2023-10-23T13:52:55.015787878Z -2023-10-23T13:51:54.732Z,2023-10-23T13:51:54.732102837Z -2023-10-23T13:33:34.937Z,2023-10-23T13:33:34.937193000Z -2023-10-23T12:27:28.948Z,2023-10-23T12:27:28.948000000Z -2023-10-23T12:15:03.360Z,2023-10-23T12:15:03.360103847Z -1999-10-23T12:15:03.360Z,[2023-03-23T12:15:03.360103847Z, 2023-02-23T13:33:34.937193000Z, 2023-01-23T13:55:01.543123456Z] +millis:date,nanos:date_nanos,num:long +2023-10-23T13:55:01.543Z,2023-10-23T13:55:01.543123456Z,1698069301543123456 +2023-10-23T13:53:55.832Z,2023-10-23T13:53:55.832987654Z,1698069235832987654 +2023-10-23T13:52:55.015Z,2023-10-23T13:52:55.015787878Z,1698069175015787878 
+2023-10-23T13:51:54.732Z,2023-10-23T13:51:54.732102837Z,1698069114732102837 +2023-10-23T13:33:34.937Z,2023-10-23T13:33:34.937193000Z,1698068014937193000 +2023-10-23T12:27:28.948Z,2023-10-23T12:27:28.948000000Z,1698064048948000000 +2023-10-23T12:15:03.360Z,2023-10-23T12:15:03.360103847Z,1698063303360103847 +1999-10-23T12:15:03.360Z,[2023-03-23T12:15:03.360103847Z, 2023-02-23T13:33:34.937193000Z, 2023-01-23T13:55:01.543123456Z], 0 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec index b77689e1b5768..ad7149b0f742f 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec @@ -70,3 +70,196 @@ FROM date_nanos | SORT millis asc | EVAL nanos = MV_LAST(nanos) | KEEP nanos | L nanos:date_nanos 2023-03-23T12:15:03.360103847Z ; + +string to date nanos +required_capability: to_date_nanos + +ROW d = TO_DATE_NANOS("2023-03-23T12:15:03.360103847"); + +d:date_nanos +2023-03-23T12:15:03.360103847Z +; + +string to date nanos, :: notation +required_capability: to_date_nanos + +ROW d = "2023-03-23T12:15:03.360103847"::date_nanos; + +d:date_nanos +2023-03-23T12:15:03.360103847Z +; + +string to date nanos, milliseconds only +required_capability: to_date_nanos + +ROW d = TO_DATE_NANOS("2023-03-23T12:15:03.360"); + +d:date_nanos +2023-03-23T12:15:03.360Z +; + +string to date nanos, out of range +required_capability: to_date_nanos + +ROW d = TO_DATE_NANOS("2262-04-12T00:00:00.000"); +warning:Line 1:9: evaluation of [TO_DATE_NANOS(\"2262-04-12T00:00:00.000\")] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:9: java.lang.IllegalArgumentException: date[2262-04-12T00:00:00Z] is after 2262-04-11T23:47:16.854775807 and cannot be stored in nanosecond resolution + +d:date_nanos +null +; + +string to date nanos, pre 1970 +required_capability: to_date_nanos + +ROW d = TO_DATE_NANOS("1969-04-12T00:00:00.000"); +warning:Line 1:9: evaluation of [TO_DATE_NANOS(\"1969-04-12T00:00:00.000\")] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:9: java.lang.IllegalArgumentException: date[1969-04-12T00:00:00Z] is before the epoch in 1970 and cannot be stored in nanosecond resolution + +d:date_nanos +null +; + +long to date nanos +required_capability: to_date_nanos + +ROW d = TO_DATE_NANOS(1724160894123456789); + +d:date_nanos +2024-08-20T13:34:54.123456789Z +; + +long to date nanos, :: notation +required_capability: to_date_nanos + +ROW d = 1724160894123456789::date_nanos; + +d:date_nanos +2024-08-20T13:34:54.123456789Z +; + + +long to date nanos, before 1970 +required_capability: to_date_nanos + +ROW d = TO_DATE_NANOS(TO_LONG(-1)); + +warning:Line 1:9: evaluation of [TO_DATE_NANOS(TO_LONG(-1))] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:9: java.lang.IllegalArgumentException: Nanosecond dates before 1970-01-01T00:00:00.000Z are not supported. 
+d:date_nanos +null +; + +unsigned long to date nanos +required_capability: to_date_nanos + +ROW d = TO_DATE_NANOS(TO_UNSIGNED_LONG(1724160894123456789)); + +d:date_nanos +2024-08-20T13:34:54.123456789Z +; + +double to date nanos +required_capability: to_date_nanos + +ROW d = TO_DATE_NANOS(1724160894123456789.0); + +d:date_nanos +# Note we've lost some precision here +2024-08-20T13:34:54.123456768Z +; + +datetime to date nanos, in range +required_capability: to_date_nanos + +ROW d = TO_DATE_NANOS(TO_DATETIME("2024-08-20T13:34:54.123Z")); + +d:date_nanos +2024-08-20T13:34:54.123000000Z +; + +datetime to date nanos, with overflow +required_capability: to_date_nanos + +ROW d = TO_DATE_NANOS(TO_DATETIME("2262-04-12T00:00:00.000")); +warning:Line 1:9: evaluation of [TO_DATE_NANOS(TO_DATETIME(\"2262-04-12T00:00:00.000\"))] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:9: java.lang.IllegalArgumentException: milliSeconds [9223372800000] are after 2262-04-11T23:47:16.854775807 and cannot be converted to nanoseconds + +d:date_nanos +null +; + +datetime to date nanos, pre 1970 +required_capability: to_date_nanos + +ROW d = TO_DATE_NANOS(TO_DATETIME("1969-04-12T00:00:00.000")); +warning:Line 1:9: evaluation of [TO_DATE_NANOS(TO_DATETIME(\"1969-04-12T00:00:00.000\"))] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:9: java.lang.IllegalArgumentException: milliSeconds [-22809600000] are before the epoch in 1970 and cannot be converted to nanoseconds + +d:date_nanos +null +; + + +date nanos to long, index version +required_capability: to_date_nanos + +FROM date_nanos | WHERE millis > "2020-02-02" | EVAL l = TO_LONG(nanos) | SORT nanos DESC | KEEP l; + +l:long +1698069301543123456 +1698069235832987654 +1698069175015787878 +1698069114732102837 +1698068014937193000 +1698064048948000000 +1698063303360103847 +; + +long to date nanos, index version +required_capability: to_date_nanos + +FROM date_nanos | WHERE millis > "2020-02-02" | EVAL d = TO_DATE_NANOS(num) | SORT nanos DESC | KEEP d; + +d:date_nanos +2023-10-23T13:55:01.543123456Z +2023-10-23T13:53:55.832987654Z +2023-10-23T13:52:55.015787878Z +2023-10-23T13:51:54.732102837Z +2023-10-23T13:33:34.937193000Z +2023-10-23T12:27:28.948000000Z +2023-10-23T12:15:03.360103847Z +; + +date_nanos to date nanos, index version +required_capability: to_date_nanos + +FROM date_nanos | WHERE millis > "2020-02-02" | EVAL d = TO_DATE_NANOS(nanos) | SORT nanos DESC | KEEP d; + +d:date_nanos +2023-10-23T13:55:01.543123456Z +2023-10-23T13:53:55.832987654Z +2023-10-23T13:52:55.015787878Z +2023-10-23T13:51:54.732102837Z +2023-10-23T13:33:34.937193000Z +2023-10-23T12:27:28.948000000Z +2023-10-23T12:15:03.360103847Z +; + +attempt to cast the result of a fold to date nanos +required_capability: to_date_nanos + +ROW d = TO_DATE_NANOS(CONCAT("2023-01-01","T12:12:12")); + +d:date_nanos +2023-01-01T12:12:12.000000000Z +; + +attempt to cast nulls to date nanos +required_capability: to_date_nanos + +ROW a = TO_DATE_NANOS(null), b = TO_DATE_NANOS(null + 1::long), c = TO_DATE_NANOS(CONCAT("2024", null)); + +a:date_nanos | b:date_nanos | c:date_nanos +null | null | null +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-date_nanos.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-date_nanos.json index 506290d90b4b0..a07f9eeeca7b8 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-date_nanos.json +++ 
b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-date_nanos.json @@ -5,6 +5,9 @@ }, "nanos": { "type": "date_nanos" + }, + "num": { + "type": "long" } } } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index 2b3fa9dec797d..13c3857a5c497 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -97,6 +97,8 @@ double tau() "boolean to_boolean(field:boolean|keyword|text|double|long|unsigned_long|integer)" "cartesian_point to_cartesianpoint(field:cartesian_point|keyword|text)" "cartesian_shape to_cartesianshape(field:cartesian_point|cartesian_shape|keyword|text)" +"date_nanos to_date_nanos(field:date|date_nanos|keyword|text|double|long|unsigned_long)" +"date_nanos to_datenanos(field:date|date_nanos|keyword|text|double|long|unsigned_long)" "date_period to_dateperiod(field:date_period|keyword|text)" "date to_datetime(field:date|date_nanos|keyword|text|double|long|unsigned_long|integer)" "double to_dbl(field:boolean|date|keyword|text|double|long|unsigned_long|integer|counter_double|counter_integer|counter_long)" @@ -227,6 +229,8 @@ to_bool |field |"boolean|keyword|text|double to_boolean |field |"boolean|keyword|text|double|long|unsigned_long|integer" |Input value. The input can be a single- or multi-valued column or an expression. to_cartesianpo|field |"cartesian_point|keyword|text" |Input value. The input can be a single- or multi-valued column or an expression. to_cartesiansh|field |"cartesian_point|cartesian_shape|keyword|text" |Input value. The input can be a single- or multi-valued column or an expression. +to_date_nanos |field |"date|date_nanos|keyword|text|double|long|unsigned_long" |Input value. The input can be a single- or multi-valued column or an expression. +to_datenanos |field |"date|date_nanos|keyword|text|double|long|unsigned_long" |Input value. The input can be a single- or multi-valued column or an expression. to_dateperiod |field |"date_period|keyword|text" |Input value. The input is a valid constant date period expression. to_datetime |field |"date|date_nanos|keyword|text|double|long|unsigned_long|integer" |Input value. The input can be a single- or multi-valued column or an expression. to_dbl |field |"boolean|date|keyword|text|double|long|unsigned_long|integer|counter_double|counter_integer|counter_long" |Input value. The input can be a single- or multi-valued column or an expression. @@ -357,6 +361,8 @@ to_bool |Converts an input value to a boolean value. A string value of *tr to_boolean |Converts an input value to a boolean value. A string value of *true* will be case-insensitive converted to the Boolean *true*. For anything else, including the empty string, the function will return *false*. The numerical value of *0* will be converted to *false*, anything else will be converted to *true*. to_cartesianpo|Converts an input value to a `cartesian_point` value. A string will only be successfully converted if it respects the {wikipedia}/Well-known_text_representation_of_geometry[WKT Point] format. to_cartesiansh|Converts an input value to a `cartesian_shape` value. A string will only be successfully converted if it respects the {wikipedia}/Well-known_text_representation_of_geometry[WKT] format. +to_date_nanos |Converts an input to a nanosecond-resolution date value (aka date_nanos). 
+to_datenanos |Converts an input to a nanosecond-resolution date value (aka date_nanos). to_dateperiod |Converts an input value into a `date_period` value. to_datetime |Converts an input value to a date value. A string will only be successfully converted if it's respecting the format `yyyy-MM-dd'T'HH:mm:ss.SSS'Z'`. To convert dates in other formats, use <>. to_dbl |Converts an input value to a double value. If the input parameter is of a date type, its value will be interpreted as milliseconds since the {wikipedia}/Unix_time[Unix epoch], converted to double. Boolean *true* will be converted to double *1.0*, *false* to *0.0*. @@ -489,6 +495,8 @@ to_bool |boolean to_boolean |boolean |false |false |false to_cartesianpo|cartesian_point |false |false |false to_cartesiansh|cartesian_shape |false |false |false +to_date_nanos |date_nanos |false |false |false +to_datenanos |date_nanos |false |false |false to_dateperiod |date_period |false |false |false to_datetime |date |false |false |false to_dbl |double |false |false |false @@ -536,5 +544,5 @@ required_capability: meta meta functions | stats a = count(*), b = count(*), c = count(*) | mv_expand c; a:long | b:long | c:long -119 | 119 | 119 +121 | 121 | 121 ; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromDatetimeEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromDatetimeEvaluator.java new file mode 100644 index 0000000000000..e00e7e044ae12 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromDatetimeEvaluator.java @@ -0,0 +1,122 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToDateNanos}. + * This class is generated. Do not edit it. 
+ */ +public final class ToDateNanosFromDatetimeEvaluator extends AbstractConvertFunction.AbstractEvaluator { + public ToDateNanosFromDatetimeEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { + super(driverContext, field, source); + } + + @Override + public String name() { + return "ToDateNanosFromDatetime"; + } + + @Override + public Block evalVector(Vector v) { + LongVector vector = (LongVector) v; + int positionCount = v.getPositionCount(); + if (vector.isConstant()) { + try { + return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0), positionCount); + } catch (IllegalArgumentException e) { + registerException(e); + return driverContext.blockFactory().newConstantNullBlock(positionCount); + } + } + try (LongBlock.Builder builder = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + for (int p = 0; p < positionCount; p++) { + try { + builder.appendLong(evalValue(vector, p)); + } catch (IllegalArgumentException e) { + registerException(e); + builder.appendNull(); + } + } + return builder.build(); + } + } + + private static long evalValue(LongVector container, int index) { + long value = container.getLong(index); + return ToDateNanos.fromDatetime(value); + } + + @Override + public Block evalBlock(Block b) { + LongBlock block = (LongBlock) b; + int positionCount = block.getPositionCount(); + try (LongBlock.Builder builder = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + for (int p = 0; p < positionCount; p++) { + int valueCount = block.getValueCount(p); + int start = block.getFirstValueIndex(p); + int end = start + valueCount; + boolean positionOpened = false; + boolean valuesAppended = false; + for (int i = start; i < end; i++) { + try { + long value = evalValue(block, i); + if (positionOpened == false && valueCount > 1) { + builder.beginPositionEntry(); + positionOpened = true; + } + builder.appendLong(value); + valuesAppended = true; + } catch (IllegalArgumentException e) { + registerException(e); + } + } + if (valuesAppended == false) { + builder.appendNull(); + } else if (positionOpened) { + builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + private static long evalValue(LongBlock container, int index) { + long value = container.getLong(index); + return ToDateNanos.fromDatetime(value); + } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) { + this.field = field; + this.source = source; + } + + @Override + public ToDateNanosFromDatetimeEvaluator get(DriverContext context) { + return new ToDateNanosFromDatetimeEvaluator(field.get(context), source, context); + } + + @Override + public String toString() { + return "ToDateNanosFromDatetimeEvaluator[field=" + field + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromDoubleEvaluator.java new file mode 100644 index 0000000000000..23b30e669241b --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromDoubleEvaluator.java @@ -0,0 +1,124 @@ +// Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.core.InvalidArgumentException; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToDateNanos}. + * This class is generated. Do not edit it. + */ +public final class ToDateNanosFromDoubleEvaluator extends AbstractConvertFunction.AbstractEvaluator { + public ToDateNanosFromDoubleEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { + super(driverContext, field, source); + } + + @Override + public String name() { + return "ToDateNanosFromDouble"; + } + + @Override + public Block evalVector(Vector v) { + DoubleVector vector = (DoubleVector) v; + int positionCount = v.getPositionCount(); + if (vector.isConstant()) { + try { + return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0), positionCount); + } catch (IllegalArgumentException | InvalidArgumentException e) { + registerException(e); + return driverContext.blockFactory().newConstantNullBlock(positionCount); + } + } + try (LongBlock.Builder builder = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + for (int p = 0; p < positionCount; p++) { + try { + builder.appendLong(evalValue(vector, p)); + } catch (IllegalArgumentException | InvalidArgumentException e) { + registerException(e); + builder.appendNull(); + } + } + return builder.build(); + } + } + + private static long evalValue(DoubleVector container, int index) { + double value = container.getDouble(index); + return ToDateNanos.fromDouble(value); + } + + @Override + public Block evalBlock(Block b) { + DoubleBlock block = (DoubleBlock) b; + int positionCount = block.getPositionCount(); + try (LongBlock.Builder builder = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + for (int p = 0; p < positionCount; p++) { + int valueCount = block.getValueCount(p); + int start = block.getFirstValueIndex(p); + int end = start + valueCount; + boolean positionOpened = false; + boolean valuesAppended = false; + for (int i = start; i < end; i++) { + try { + long value = evalValue(block, i); + if (positionOpened == false && valueCount > 1) { + builder.beginPositionEntry(); + positionOpened = true; + } + builder.appendLong(value); + valuesAppended = true; + } catch (IllegalArgumentException | InvalidArgumentException e) { + registerException(e); + } + } + if (valuesAppended == false) { + builder.appendNull(); + } else if (positionOpened) { + builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + private static long evalValue(DoubleBlock container, int index) { + double value = container.getDouble(index); + return ToDateNanos.fromDouble(value); + } + + public static class Factory implements 
EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) { + this.field = field; + this.source = source; + } + + @Override + public ToDateNanosFromDoubleEvaluator get(DriverContext context) { + return new ToDateNanosFromDoubleEvaluator(field.get(context), source, context); + } + + @Override + public String toString() { + return "ToDateNanosFromDoubleEvaluator[field=" + field + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromLongEvaluator.java new file mode 100644 index 0000000000000..cc52208ce5a25 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromLongEvaluator.java @@ -0,0 +1,122 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToDateNanos}. + * This class is generated. Do not edit it. 
+ */ +public final class ToDateNanosFromLongEvaluator extends AbstractConvertFunction.AbstractEvaluator { + public ToDateNanosFromLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { + super(driverContext, field, source); + } + + @Override + public String name() { + return "ToDateNanosFromLong"; + } + + @Override + public Block evalVector(Vector v) { + LongVector vector = (LongVector) v; + int positionCount = v.getPositionCount(); + if (vector.isConstant()) { + try { + return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0), positionCount); + } catch (IllegalArgumentException e) { + registerException(e); + return driverContext.blockFactory().newConstantNullBlock(positionCount); + } + } + try (LongBlock.Builder builder = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + for (int p = 0; p < positionCount; p++) { + try { + builder.appendLong(evalValue(vector, p)); + } catch (IllegalArgumentException e) { + registerException(e); + builder.appendNull(); + } + } + return builder.build(); + } + } + + private static long evalValue(LongVector container, int index) { + long value = container.getLong(index); + return ToDateNanos.fromLong(value); + } + + @Override + public Block evalBlock(Block b) { + LongBlock block = (LongBlock) b; + int positionCount = block.getPositionCount(); + try (LongBlock.Builder builder = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + for (int p = 0; p < positionCount; p++) { + int valueCount = block.getValueCount(p); + int start = block.getFirstValueIndex(p); + int end = start + valueCount; + boolean positionOpened = false; + boolean valuesAppended = false; + for (int i = start; i < end; i++) { + try { + long value = evalValue(block, i); + if (positionOpened == false && valueCount > 1) { + builder.beginPositionEntry(); + positionOpened = true; + } + builder.appendLong(value); + valuesAppended = true; + } catch (IllegalArgumentException e) { + registerException(e); + } + } + if (valuesAppended == false) { + builder.appendNull(); + } else if (positionOpened) { + builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + private static long evalValue(LongBlock container, int index) { + long value = container.getLong(index); + return ToDateNanos.fromLong(value); + } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) { + this.field = field; + this.source = source; + } + + @Override + public ToDateNanosFromLongEvaluator get(DriverContext context) { + return new ToDateNanosFromLongEvaluator(field.get(context), source, context); + } + + @Override + public String toString() { + return "ToDateNanosFromLongEvaluator[field=" + field + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromStringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromStringEvaluator.java new file mode 100644 index 0000000000000..c5a20ac298da7 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromStringEvaluator.java @@ -0,0 +1,126 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToDateNanos}. + * This class is generated. Do not edit it. + */ +public final class ToDateNanosFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { + public ToDateNanosFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { + super(driverContext, field, source); + } + + @Override + public String name() { + return "ToDateNanosFromString"; + } + + @Override + public Block evalVector(Vector v) { + BytesRefVector vector = (BytesRefVector) v; + int positionCount = v.getPositionCount(); + BytesRef scratchPad = new BytesRef(); + if (vector.isConstant()) { + try { + return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0, scratchPad), positionCount); + } catch (IllegalArgumentException e) { + registerException(e); + return driverContext.blockFactory().newConstantNullBlock(positionCount); + } + } + try (LongBlock.Builder builder = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + for (int p = 0; p < positionCount; p++) { + try { + builder.appendLong(evalValue(vector, p, scratchPad)); + } catch (IllegalArgumentException e) { + registerException(e); + builder.appendNull(); + } + } + return builder.build(); + } + } + + private static long evalValue(BytesRefVector container, int index, BytesRef scratchPad) { + BytesRef value = container.getBytesRef(index, scratchPad); + return ToDateNanos.fromKeyword(value); + } + + @Override + public Block evalBlock(Block b) { + BytesRefBlock block = (BytesRefBlock) b; + int positionCount = block.getPositionCount(); + try (LongBlock.Builder builder = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + BytesRef scratchPad = new BytesRef(); + for (int p = 0; p < positionCount; p++) { + int valueCount = block.getValueCount(p); + int start = block.getFirstValueIndex(p); + int end = start + valueCount; + boolean positionOpened = false; + boolean valuesAppended = false; + for (int i = start; i < end; i++) { + try { + long value = evalValue(block, i, scratchPad); + if (positionOpened == false && valueCount > 1) { + builder.beginPositionEntry(); + positionOpened = true; + } + builder.appendLong(value); + valuesAppended = true; + } catch (IllegalArgumentException e) { + registerException(e); + } + } + if (valuesAppended == false) { + builder.appendNull(); + } else if (positionOpened) { + builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + private static long evalValue(BytesRefBlock container, int index, BytesRef scratchPad) { + BytesRef value = container.getBytesRef(index, scratchPad); + return ToDateNanos.fromKeyword(value); + } + + public static 
class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) { + this.field = field; + this.source = source; + } + + @Override + public ToDateNanosFromStringEvaluator get(DriverContext context) { + return new ToDateNanosFromStringEvaluator(field.get(context), source, context); + } + + @Override + public String toString() { + return "ToDateNanosFromStringEvaluator[field=" + field + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index f714d4d1808c1..f0fa89dedd9ab 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -273,6 +273,11 @@ public enum Cap { */ DATE_NANOS_TYPE(EsqlCorePlugin.DATE_NANOS_FEATURE_FLAG), + /** + * Support for to_date_nanos function + */ + TO_DATE_NANOS(EsqlCorePlugin.DATE_NANOS_FEATURE_FLAG), + /** * Support CIDRMatch in CombineDisjunctions rule. */ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index 9288e1cf81a15..90957f55141b9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -34,10 +34,6 @@ import org.elasticsearch.xpack.esql.core.expression.function.scalar.ScalarFunction; import org.elasticsearch.xpack.esql.core.expression.predicate.BinaryOperator; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison; -import org.elasticsearch.xpack.esql.core.rule.ParameterizedRule; -import org.elasticsearch.xpack.esql.core.rule.ParameterizedRuleExecutor; -import org.elasticsearch.xpack.esql.core.rule.Rule; -import org.elasticsearch.xpack.esql.core.rule.RuleExecutor; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; @@ -81,6 +77,10 @@ import org.elasticsearch.xpack.esql.plan.logical.local.EsqlProject; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; +import org.elasticsearch.xpack.esql.rule.ParameterizedRule; +import org.elasticsearch.xpack.esql.rule.ParameterizedRuleExecutor; +import org.elasticsearch.xpack.esql.rule.Rule; +import org.elasticsearch.xpack.esql.rule.RuleExecutor; import org.elasticsearch.xpack.esql.session.Configuration; import org.elasticsearch.xpack.esql.stats.FeatureMetric; import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; @@ -1070,12 +1070,12 @@ private static Expression processBinaryOperator(BinaryOperator o) { private static Expression processIn(In in) { Expression left = in.value(); List right = in.list(); - DataType targetDataType = left.dataType(); - if (left.resolved() == false || supportsStringImplicitCasting(targetDataType) == false) { + if (left.resolved() == false || supportsStringImplicitCasting(left.dataType()) == false) { return in; } + DataType targetDataType = left.dataType(); List newChildren = new 
ArrayList<>(right.size() + 1); boolean childrenChanged = false; @@ -1107,23 +1107,26 @@ private static Expression castMixedNumericTypes(EsqlScalarFunction f, DataType t DataType childDataType; for (Expression e : f.children()) { - childDataType = e.dataType(); - if (childDataType.isNumeric() == false - || childDataType == targetNumericType - || canCastNumeric(childDataType, targetNumericType) == false) { + if (e.resolved()) { + childDataType = e.dataType(); + if (childDataType.isNumeric() == false + || childDataType == targetNumericType + || canCastNumeric(childDataType, targetNumericType) == false) { + newChildren.add(e); + continue; + } + childrenChanged = true; + // add a casting function + switch (targetNumericType) { + case INTEGER -> newChildren.add(new ToInteger(e.source(), e)); + case LONG -> newChildren.add(new ToLong(e.source(), e)); + case DOUBLE -> newChildren.add(new ToDouble(e.source(), e)); + case UNSIGNED_LONG -> newChildren.add(new ToUnsignedLong(e.source(), e)); + default -> throw new EsqlIllegalArgumentException("unexpected data type: " + targetNumericType); + } + } else { newChildren.add(e); - continue; } - childrenChanged = true; - // add a casting function - switch (targetNumericType) { - case INTEGER -> newChildren.add(new ToInteger(e.source(), e)); - case LONG -> newChildren.add(new ToLong(e.source(), e)); - case DOUBLE -> newChildren.add(new ToDouble(e.source(), e)); - case UNSIGNED_LONG -> newChildren.add(new ToUnsignedLong(e.source(), e)); - default -> throw new EsqlIllegalArgumentException("unexpected data type: " + targetNumericType); - } - } return childrenChanged ? f.replaceChildren(newChildren) : f; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/AnalyzerRules.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/AnalyzerRules.java index 89f9d694e3a16..4889583d1b8a9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/AnalyzerRules.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/AnalyzerRules.java @@ -9,9 +9,9 @@ import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttribute; -import org.elasticsearch.xpack.esql.core.rule.ParameterizedRule; -import org.elasticsearch.xpack.esql.core.rule.Rule; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.rule.ParameterizedRule; +import org.elasticsearch.xpack.esql.rule.Rule; import java.util.ArrayList; import java.util.Collection; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index f96742b5a4d91..96ccf8b01e5bc 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -43,6 +43,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToBoolean; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianPoint; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianShape; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDateNanos; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDatePeriod; import 
org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDatetime; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDegrees; @@ -349,6 +350,7 @@ private FunctionDefinition[][] functions() { def(ToCartesianShape.class, ToCartesianShape::new, "to_cartesianshape"), def(ToDatePeriod.class, ToDatePeriod::new, "to_dateperiod"), def(ToDatetime.class, ToDatetime::new, "to_datetime", "to_dt"), + def(ToDateNanos.class, ToDateNanos::new, "to_date_nanos", "to_datenanos"), def(ToDegrees.class, ToDegrees::new, "to_degrees"), def(ToDouble.class, ToDouble::new, "to_double", "to_dbl"), def(ToGeoPoint.class, ToGeoPoint::new, "to_geopoint"), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/UnaryScalarFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/UnaryScalarFunction.java index bdbc9b649c101..4d34033286f52 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/UnaryScalarFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/UnaryScalarFunction.java @@ -22,6 +22,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToBoolean; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianPoint; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianShape; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDateNanos; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDatetime; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDegrees; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDouble; @@ -107,6 +108,7 @@ public static List getNamedWriteables() { entries.add(ToBoolean.ENTRY); entries.add(ToCartesianPoint.ENTRY); entries.add(ToDatetime.ENTRY); + entries.add(ToDateNanos.ENTRY); entries.add(ToDegrees.ENTRY); entries.add(ToDouble.ENTRY); entries.add(ToGeoShape.ENTRY); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanos.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanos.java new file mode 100644 index 0000000000000..9a6a91b7ccedd --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanos.java @@ -0,0 +1,134 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.time.DateFormatters; +import org.elasticsearch.common.time.DateUtils; +import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.xpack.esql.core.InvalidArgumentException; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.NodeInfo; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.core.type.DataTypeConverter; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; + +import java.io.IOException; +import java.time.Instant; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.xpack.esql.core.type.DataType.DATETIME; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATE_NANOS; +import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; +import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.DEFAULT_DATE_NANOS_FORMATTER; + +public class ToDateNanos extends AbstractConvertFunction { + public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( + Expression.class, + "ToDateNanos", + ToDateNanos::new + ); + + private static final Map<DataType, BuildFactory> EVALUATORS = Map.ofEntries( + Map.entry(DATETIME, ToDateNanosFromDatetimeEvaluator.Factory::new), + Map.entry(DATE_NANOS, (field, source) -> field), + Map.entry(LONG, ToDateNanosFromLongEvaluator.Factory::new), + Map.entry(KEYWORD, ToDateNanosFromStringEvaluator.Factory::new), + Map.entry(TEXT, ToDateNanosFromStringEvaluator.Factory::new), + Map.entry(DOUBLE, ToDateNanosFromDoubleEvaluator.Factory::new), + Map.entry(UNSIGNED_LONG, ToLongFromUnsignedLongEvaluator.Factory::new) + /* + NB: not including an integer conversion, because max int in nanoseconds is only about 2 seconds after epoch, and it seems more likely + a user who tries to convert an int to a nanosecond date has made a mistake that we should catch at parse time. + TO_DATE_NANOS(TO_LONG(intVal)) is still possible if someone really needs to do this. + */ + ); + + @FunctionInfo( + returnType = "date_nanos", + description = "Converts an input to a nanosecond-resolution date value (aka date_nanos).", + note = "The range for date nanos is 1970-01-01T00:00:00.000000000Z to 2262-04-11T23:47:16.854775807Z. Additionally, integers " + + "cannot be converted into date nanos, as the range of integer nanoseconds only covers about 2 seconds after epoch.", + preview = true + ) + public ToDateNanos( + Source source, + @Param( + name = "field", + type = { "date", "date_nanos", "keyword", "text", "double", "long", "unsigned_long" }, + description = "Input value. The input can be a single- or multi-valued column or an expression."
+ ) Expression field + ) { + super(source, field); + } + + protected ToDateNanos(StreamInput in) throws IOException { + super(in); + } + + @Override + public DataType dataType() { + return DATE_NANOS; + } + + @Override + protected Map<DataType, BuildFactory> factories() { + return EVALUATORS; + } + + @Override + public Expression replaceChildren(List<Expression> newChildren) { + return new ToDateNanos(source(), newChildren.get(0)); + } + + @Override + protected NodeInfo<? extends Expression> info() { + return NodeInfo.create(this, ToDateNanos::new, field()); + } + + @Override + public String getWriteableName() { + return ENTRY.name; + } + + @ConvertEvaluator(extraName = "FromLong", warnExceptions = { IllegalArgumentException.class }) + static long fromLong(long in) { + if (in < 0L) { + throw new IllegalArgumentException("Nanosecond dates before 1970-01-01T00:00:00.000Z are not supported."); + } + return in; + } + + @ConvertEvaluator(extraName = "FromDouble", warnExceptions = { IllegalArgumentException.class, InvalidArgumentException.class }) + static long fromDouble(double in) { + if (in < 0d) { + throw new IllegalArgumentException("Nanosecond dates before 1970-01-01T00:00:00.000Z are not supported."); + } + return DataTypeConverter.safeDoubleToLong(in); + } + + @ConvertEvaluator(extraName = "FromString", warnExceptions = { IllegalArgumentException.class }) + static long fromKeyword(BytesRef in) { + Instant parsed = DateFormatters.from(DEFAULT_DATE_NANOS_FORMATTER.parse(in.utf8ToString())).toInstant(); + return DateUtils.toLong(parsed); + } + + @ConvertEvaluator(extraName = "FromDatetime", warnExceptions = { IllegalArgumentException.class }) + static long fromDatetime(long in) { + return DateUtils.toNanoSeconds(in); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizer.java index 8c54b61dc803d..44334ff112bad 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizer.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.esql.optimizer; -import org.elasticsearch.xpack.esql.core.rule.ParameterizedRuleExecutor; import org.elasticsearch.xpack.esql.optimizer.rules.logical.PropagateEmptyRelation; import org.elasticsearch.xpack.esql.optimizer.rules.logical.local.InferIsNotNull; import org.elasticsearch.xpack.esql.optimizer.rules.logical.local.InferNonNullAggConstraint; @@ -15,6 +14,7 @@ import org.elasticsearch.xpack.esql.optimizer.rules.logical.local.ReplaceMissingFieldWithNull; import org.elasticsearch.xpack.esql.optimizer.rules.logical.local.ReplaceTopNWithLimitAndSort; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.rule.ParameterizedRuleExecutor; import java.util.ArrayList; import java.util.List; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java index 20f3e5c9150e5..48bafd8eef00e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java @@ -9,8 +9,6 @@ import org.elasticsearch.xpack.esql.VerificationException; import
org.elasticsearch.xpack.esql.common.Failure; -import org.elasticsearch.xpack.esql.core.rule.ParameterizedRuleExecutor; -import org.elasticsearch.xpack.esql.core.rule.Rule; import org.elasticsearch.xpack.esql.optimizer.rules.physical.local.EnableSpatialDistancePushdown; import org.elasticsearch.xpack.esql.optimizer.rules.physical.local.InsertFieldExtraction; import org.elasticsearch.xpack.esql.optimizer.rules.physical.local.PushFiltersToSource; @@ -20,6 +18,8 @@ import org.elasticsearch.xpack.esql.optimizer.rules.physical.local.ReplaceSourceAttributes; import org.elasticsearch.xpack.esql.optimizer.rules.physical.local.SpatialDocValuesExtraction; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; +import org.elasticsearch.xpack.esql.rule.ParameterizedRuleExecutor; +import org.elasticsearch.xpack.esql.rule.Rule; import java.util.ArrayList; import java.util.Collection; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java index 459e3f4d0284c..bfbf5a8f0c66f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java @@ -9,7 +9,6 @@ import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.common.Failures; -import org.elasticsearch.xpack.esql.core.rule.ParameterizedRuleExecutor; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.optimizer.rules.logical.AddDefaultTopN; import org.elasticsearch.xpack.esql.optimizer.rules.logical.BooleanFunctionEqualsElimination; @@ -58,6 +57,7 @@ import org.elasticsearch.xpack.esql.optimizer.rules.logical.SubstituteSurrogates; import org.elasticsearch.xpack.esql.optimizer.rules.logical.TranslateMetricsAggregate; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.rule.ParameterizedRuleExecutor; import java.util.List; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalOptimizerRules.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalOptimizerRules.java index 644bfa7b807ef..482a89b50c865 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalOptimizerRules.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalOptimizerRules.java @@ -8,11 +8,11 @@ package org.elasticsearch.xpack.esql.optimizer; import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.rule.ParameterizedRule; -import org.elasticsearch.xpack.esql.core.rule.Rule; import org.elasticsearch.xpack.esql.core.util.ReflectionUtils; import org.elasticsearch.xpack.esql.optimizer.rules.logical.OptimizerRules.TransformDirection; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; +import org.elasticsearch.xpack.esql.rule.ParameterizedRule; +import org.elasticsearch.xpack.esql.rule.Rule; public class PhysicalOptimizerRules { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizer.java index 03b9705fefc79..19f6bc810d01f 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizer.java @@ -9,11 +9,11 @@ import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.common.Failure; -import org.elasticsearch.xpack.esql.core.rule.ParameterizedRuleExecutor; -import org.elasticsearch.xpack.esql.core.rule.RuleExecutor; import org.elasticsearch.xpack.esql.optimizer.rules.physical.ProjectAwayColumns; import org.elasticsearch.xpack.esql.plan.physical.FragmentExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; +import org.elasticsearch.xpack.esql.rule.ParameterizedRuleExecutor; +import org.elasticsearch.xpack.esql.rule.RuleExecutor; import java.util.Collection; import java.util.List; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/OptimizerRules.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/OptimizerRules.java index f087fab06828e..2a0b2a6af36aa 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/OptimizerRules.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/OptimizerRules.java @@ -7,10 +7,10 @@ package org.elasticsearch.xpack.esql.optimizer.rules.logical; import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.rule.ParameterizedRule; -import org.elasticsearch.xpack.esql.core.rule.Rule; import org.elasticsearch.xpack.esql.core.util.ReflectionUtils; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.rule.ParameterizedRule; +import org.elasticsearch.xpack.esql.rule.Rule; public final class OptimizerRules { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateEvalFoldables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateEvalFoldables.java index 139f192d3c14e..73eaa9220fd84 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateEvalFoldables.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateEvalFoldables.java @@ -12,10 +12,10 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; -import org.elasticsearch.xpack.esql.core.rule.Rule; import org.elasticsearch.xpack.esql.plan.logical.Eval; import org.elasticsearch.xpack.esql.plan.logical.Filter; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.rule.Rule; /** * Replace any reference attribute with its source, if it does not affect the result. 
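The string of hunks above and below this point is one mechanical refactor: ParameterizedRule, Rule, ParameterizedRuleExecutor, and RuleExecutor move out of esql-core (org.elasticsearch.xpack.esql.core.rule) into the ESQL plugin's own org.elasticsearch.xpack.esql.rule package, and every analyzer and optimizer rule re-imports them. For orientation, a minimal sketch of a rule written against the relocated package; the class name and no-op body are hypothetical, and it assumes Rule<E, T> keeps the Function<T, T> shape that the SetAsOptimized hunk below implies.

```java
package org.elasticsearch.xpack.esql.optimizer.rules.logical;

// Hypothetical illustration only, not part of this change.
import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;
import org.elasticsearch.xpack.esql.rule.Rule; // relocated package, was ...esql.core.rule

public final class NoopRule extends Rule<LogicalPlan, LogicalPlan> {
    @Override
    public LogicalPlan apply(LogicalPlan plan) {
        // A real rule returns a transformed plan, e.g. via plan.transformDown(...);
        // this sketch only shows which package the base type now lives in.
        return plan;
    }
}
```

Since only import statements change, behavior is identical; the move simply makes the ESQL plugin own the rule framework instead of borrowing esql-core's copy.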
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PruneColumns.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PruneColumns.java index 62f4e391f13ec..e01608c546090 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PruneColumns.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PruneColumns.java @@ -13,7 +13,6 @@ import org.elasticsearch.xpack.esql.core.expression.EmptyAttribute; import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; -import org.elasticsearch.xpack.esql.core.rule.Rule; import org.elasticsearch.xpack.esql.core.util.Holder; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.Eval; @@ -23,6 +22,7 @@ import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; import org.elasticsearch.xpack.esql.planner.PlannerUtils; +import org.elasticsearch.xpack.esql.rule.Rule; import java.util.ArrayList; import java.util.List; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceAliasingEvalWithProject.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceAliasingEvalWithProject.java index e57a95f0f7dad..dc9421a22a69c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceAliasingEvalWithProject.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceAliasingEvalWithProject.java @@ -12,12 +12,12 @@ import org.elasticsearch.xpack.esql.core.expression.AttributeMap; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; -import org.elasticsearch.xpack.esql.core.rule.Rule; import org.elasticsearch.xpack.esql.core.util.Holder; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.Eval; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.Project; +import org.elasticsearch.xpack.esql.rule.Rule; import java.util.ArrayList; import java.util.List; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/SetAsOptimized.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/SetAsOptimized.java index c9a2b44e40ebf..f31b1ef0ceaf4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/SetAsOptimized.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/SetAsOptimized.java @@ -7,8 +7,8 @@ package org.elasticsearch.xpack.esql.optimizer.rules.logical; -import org.elasticsearch.xpack.esql.core.rule.Rule; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.rule.Rule; public final class SetAsOptimized extends Rule { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/local/InferIsNotNull.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/local/InferIsNotNull.java index 0e5bb74d1cdf9..d161071fe2839 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/local/InferIsNotNull.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/local/InferIsNotNull.java @@ -13,11 +13,11 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates; import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; -import org.elasticsearch.xpack.esql.core.rule.Rule; import org.elasticsearch.xpack.esql.core.util.CollectionUtils; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Case; import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.rule.Rule; import java.util.LinkedHashSet; import java.util.Set; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/local/ReplaceMissingFieldWithNull.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/local/ReplaceMissingFieldWithNull.java index b13667465ce63..0fa6d61a0ca9b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/local/ReplaceMissingFieldWithNull.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/local/ReplaceMissingFieldWithNull.java @@ -12,7 +12,6 @@ import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; -import org.elasticsearch.xpack.esql.core.rule.ParameterizedRule; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.optimizer.LocalLogicalOptimizerContext; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; @@ -25,6 +24,7 @@ import org.elasticsearch.xpack.esql.plan.logical.RegexExtract; import org.elasticsearch.xpack.esql.plan.logical.TopN; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; +import org.elasticsearch.xpack.esql.rule.ParameterizedRule; import org.elasticsearch.xpack.esql.stats.SearchStats; import java.util.ArrayList; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/ProjectAwayColumns.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/ProjectAwayColumns.java index bee27acd06ec0..290ae2d3ff1be 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/ProjectAwayColumns.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/ProjectAwayColumns.java @@ -12,7 +12,6 @@ import org.elasticsearch.xpack.esql.core.expression.AttributeSet; import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.Literal; -import org.elasticsearch.xpack.esql.core.rule.Rule; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.util.Holder; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; @@ -22,6 +21,7 @@ import org.elasticsearch.xpack.esql.plan.physical.FragmentExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.plan.physical.UnaryExec; +import org.elasticsearch.xpack.esql.rule.Rule; import java.util.ArrayList; import java.util.List; 
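ReplaceMissingFieldWithNull above is the parameterized flavor of the same move: a ParameterizedRule receives an optimizer context (here LocalLogicalOptimizerContext, which carries the SearchStats it consults) alongside the plan. A minimal sketch under the relocated package; the class name and body are illustrative, not from the PR, and assume the three-type-parameter apply(plan, context) contract implied by the renames that follow:

    // Hypothetical parameterized rule, not part of this PR.
    package org.elasticsearch.xpack.esql.optimizer.rules.logical.local;

    import org.elasticsearch.xpack.esql.optimizer.LocalLogicalOptimizerContext;
    import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;
    import org.elasticsearch.xpack.esql.rule.ParameterizedRule; // previously under esql.core.rule

    public class NoOpLocalRule extends ParameterizedRule<LogicalPlan, LogicalPlan, LocalLogicalOptimizerContext> {
        @Override
        public LogicalPlan apply(LogicalPlan plan, LocalLogicalOptimizerContext context) {
            // The context is how a local rule asks what the target shards actually contain;
            // a real rule would rewrite the plan based on it, this sketch returns it unchanged.
            return plan;
        }
    }
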
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/InsertFieldExtraction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/InsertFieldExtraction.java index 7186a5194a262..c215e86b0045a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/InsertFieldExtraction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/InsertFieldExtraction.java @@ -12,12 +12,12 @@ import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; import org.elasticsearch.xpack.esql.core.expression.TypedAttribute; -import org.elasticsearch.xpack.esql.core.rule.Rule; import org.elasticsearch.xpack.esql.optimizer.rules.physical.ProjectAwayColumns; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; import org.elasticsearch.xpack.esql.plan.physical.FieldExtractExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.plan.physical.UnaryExec; +import org.elasticsearch.xpack.esql.rule.Rule; import java.util.LinkedHashSet; import java.util.LinkedList; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/TableIdentifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/TableIdentifier.java index ceefe4e254557..532d93eec48af 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/TableIdentifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/TableIdentifier.java @@ -10,6 +10,8 @@ import java.util.Objects; +import static org.elasticsearch.transport.RemoteClusterAware.REMOTE_CLUSTER_INDEX_SEPARATOR; + public class TableIdentifier { private final Source source; @@ -55,7 +57,7 @@ public Source source() { } public String qualifiedIndex() { - return cluster != null ? cluster + ":" + index : index; + return cluster != null ? cluster + REMOTE_CLUSTER_INDEX_SEPARATOR + index : index; } @Override @@ -63,7 +65,7 @@ public String toString() { StringBuilder builder = new StringBuilder(); if (cluster != null) { builder.append(cluster); - builder.append(":"); + builder.append(REMOTE_CLUSTER_INDEX_SEPARATOR); } builder.append(index); return builder.toString(); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/rule/ParameterizedRule.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/rule/ParameterizedRule.java similarity index 92% rename from x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/rule/ParameterizedRule.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/rule/ParameterizedRule.java index 5aa7318cb74b1..ba771d503da08 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/rule/ParameterizedRule.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/rule/ParameterizedRule.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.esql.core.rule; +package org.elasticsearch.xpack.esql.rule; import org.elasticsearch.xpack.esql.core.tree.Node; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/rule/ParameterizedRuleExecutor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/rule/ParameterizedRuleExecutor.java similarity index 95% rename from x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/rule/ParameterizedRuleExecutor.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/rule/ParameterizedRuleExecutor.java index bfce2b42c0328..fedef03799093 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/rule/ParameterizedRuleExecutor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/rule/ParameterizedRuleExecutor.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.esql.core.rule; +package org.elasticsearch.xpack.esql.rule; import org.elasticsearch.xpack.esql.core.tree.Node; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/rule/Rule.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/rule/Rule.java similarity index 96% rename from x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/rule/Rule.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/rule/Rule.java index 163b1f89f2abb..d8b16d350e3ea 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/rule/Rule.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/rule/Rule.java @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ -package org.elasticsearch.xpack.esql.core.rule; +package org.elasticsearch.xpack.esql.rule; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/rule/RuleExecutionException.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/rule/RuleExecutionException.java similarity index 91% rename from x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/rule/RuleExecutionException.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/rule/RuleExecutionException.java index 393fd3765a01a..218d14297e956 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/rule/RuleExecutionException.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/rule/RuleExecutionException.java @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ -package org.elasticsearch.xpack.esql.core.rule; +package org.elasticsearch.xpack.esql.rule; import org.elasticsearch.xpack.esql.core.QlServerException; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/rule/RuleExecutor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/rule/RuleExecutor.java similarity index 99% rename from x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/rule/RuleExecutor.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/rule/RuleExecutor.java index ba873e690be7e..3d73c0d45e9a0 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/rule/RuleExecutor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/rule/RuleExecutor.java @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ -package org.elasticsearch.xpack.esql.core.rule; +package org.elasticsearch.xpack.esql.rule; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java index 0c530bd0eb273..edc3081a33681 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java @@ -28,6 +28,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToBoolean; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianPoint; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianShape; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDateNanos; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDatePeriod; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDatetime; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDouble; @@ -63,6 +64,7 @@ import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_POINT; import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_SHAPE; import static org.elasticsearch.xpack.esql.core.type.DataType.DATETIME; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATE_NANOS; import static org.elasticsearch.xpack.esql.core.type.DataType.DATE_PERIOD; import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_POINT; @@ -96,6 +98,7 @@ public class EsqlDataTypeConverter { public static final DateFormatter DEFAULT_DATE_TIME_FORMATTER = DateFormatter.forPattern("strict_date_optional_time"); + public static final DateFormatter DEFAULT_DATE_NANOS_FORMATTER = DateFormatter.forPattern("strict_date_optional_time_nanos"); public static final DateFormatter HOUR_MINUTE_SECOND = DateFormatter.forPattern("strict_hour_minute_second_fraction"); @@ -104,6 +107,7 @@ public class EsqlDataTypeConverter { entry(CARTESIAN_POINT, ToCartesianPoint::new), entry(CARTESIAN_SHAPE, ToCartesianShape::new), entry(DATETIME, ToDatetime::new), + entry(DATE_NANOS, ToDateNanos::new), // ToDegrees, typeless entry(DOUBLE, ToDouble::new), entry(GEO_POINT, ToGeoPoint::new), @@ -499,7 +503,7 @@ public static String dateTimeToString(long dateTime) { } public static String nanoTimeToString(long dateTime) { - 
return DateFormatter.forPattern("strict_date_optional_time_nanos").formatNanos(dateTime); + return DEFAULT_DATE_NANOS_FORMATTER.formatNanos(dateTime); } public static String dateTimeToString(long dateTime, DateFormatter formatter) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index 2012e319510af..35e553de61a78 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -283,6 +283,17 @@ public void testImplicitCastingErrorMessages() { "1:42: Cannot convert string [a] to [DOUBLE], error [Cannot parse number [a]]", error("ROW a=[3, 5, 1, 6] | EVAL avg_a = MV_AVG(\"a\")") ); + assertEquals( + "1:19: Unknown column [languages.*], did you mean any of [languages, languages.byte, languages.long, languages.short]?", + error("from test | where `languages.*` in (1, 2)") + ); + assertEquals("1:22: Unknown function [func]", error("from test | eval x = func(languages) | where x in (1, 2)")); + assertEquals( + "1:32: Unknown column [languages.*], did you mean any of [languages, languages.byte, languages.long, languages.short]?", + error("from test | eval x = coalesce( `languages.*`, languages, 0 )") + ); + String error = error("from test | eval x = func(languages) | eval y = coalesce(x, languages, 0 )"); + assertThat(error, containsString("function [func]")); } public void testAggsExpressionsInStatsAggs() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java index e44ea907518b4..b3942a71edadb 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java @@ -623,6 +623,7 @@ public static void forUnaryBoolean( /** * Generate positive test cases for a unary function operating on an {@link DataType#DATETIME}. + * This variant defaults to the maximum range of possible values. */ public static void forUnaryDatetime( List<TestCaseSupplier> suppliers, @@ -641,6 +642,29 @@ public static void forUnaryDatetime( ); } + /** + * Generate positive test cases for a unary function operating on an {@link DataType#DATETIME}. + * This variant accepts a range of values. + */ + public static void forUnaryDatetime( + List<TestCaseSupplier> suppliers, + String expectedEvaluatorToString, + DataType expectedType, + long min, + long max, + Function<Instant, Object> expectedValue, + List<String> warnings + ) { + unaryNumeric( + suppliers, + expectedEvaluatorToString, + dateCases(min, max), + expectedType, + n -> expectedValue.apply(Instant.ofEpochMilli(n.longValue())), + warnings + ); + } + /** * Generate positive test cases for a unary function operating on an {@link DataType#DATE_NANOS}. */ @@ -1044,26 +1068,45 @@ public static List<TypedDataSupplier> booleanCases() { * </p> */ public static List<TypedDataSupplier> dateCases() { - return List.of( - new TypedDataSupplier("<1970-01-01T00:00:00Z>", () -> 0L, DataType.DATETIME), - new TypedDataSupplier( - "<date>", - () -> ESTestCase.randomLongBetween(0, 10 * (long) 10e11), // 1970-01-01T00:00:00Z - 2286-11-20T17:46:40Z - DataType.DATETIME - ), - new TypedDataSupplier( - "<far future date>", - // 2286-11-20T17:46:40Z - +292278994-08-17T07:12:55.807Z - () -> ESTestCase.randomLongBetween(10 * (long) 10e11, Long.MAX_VALUE), - DataType.DATETIME - ), - new TypedDataSupplier( - "<near the end of time>", - // very close to +292278994-08-17T07:12:55.807Z, the maximum supported millis since epoch - () -> ESTestCase.randomLongBetween(Long.MAX_VALUE / 100 * 99, Long.MAX_VALUE), - DataType.DATETIME - ) - ); + return dateCases(Long.MIN_VALUE, Long.MAX_VALUE); + } + + /** + * Generate cases for {@link DataType#DATETIME}. + * <p> + * For multi-row parameters, see {@link MultiRowTestCaseSupplier#dateCases}. + * </p> + */ + public static List<TypedDataSupplier> dateCases(long min, long max) { + List<TypedDataSupplier> cases = new ArrayList<>(); + if (min <= 0 && max >= 0) { + cases.add(new TypedDataSupplier("<1970-01-01T00:00:00Z>", () -> 0L, DataType.DATETIME)); + } + + // 1970-01-01T00:00:00Z - 2286-11-20T17:46:40Z + long lower1 = Math.max(min, 0); + long upper1 = Math.min(max, 10 * (long) 10e11); + if (lower1 < upper1) { + cases.add(new TypedDataSupplier("<date>", () -> ESTestCase.randomLongBetween(lower1, upper1), DataType.DATETIME)); + } + + // 2286-11-20T17:46:40Z - +292278994-08-17T07:12:55.807Z + long lower2 = Math.max(min, 10 * (long) 10e11); + long upper2 = Math.min(max, Long.MAX_VALUE); + if (lower2 < upper2) { + cases.add(new TypedDataSupplier("<far future date>", () -> ESTestCase.randomLongBetween(lower2, upper2), DataType.DATETIME)); + } + + // very close to +292278994-08-17T07:12:55.807Z, the maximum supported millis since epoch + long lower3 = Math.max(min, Long.MAX_VALUE / 100 * 99); + long upper3 = Math.min(max, Long.MAX_VALUE); + if (lower3 < upper3) { + cases.add( + new TypedDataSupplier("<near the end of time>", () -> ESTestCase.randomLongBetween(lower3, upper3), DataType.DATETIME) + ); + } + + return cases; } /** diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosTests.java new file mode 100644 index 0000000000000..e91a5cc1ebca4 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosTests.java @@ -0,0 +1,135 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.common.time.DateUtils; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; + +import java.math.BigInteger; +import java.util.ArrayList; +import java.util.List; +import java.util.function.Supplier; + +public class ToDateNanosTests extends AbstractScalarFunctionTestCase { + public ToDateNanosTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable<Object[]> parameters() { + final String read = "Attribute[channel=0]"; + final List<TestCaseSupplier> suppliers = new ArrayList<>(); + + TestCaseSupplier.forUnaryDateNanos(suppliers, read, DataType.DATE_NANOS, DateUtils::toLong, List.of()); + TestCaseSupplier.forUnaryDatetime( + suppliers, + "ToDateNanosFromDatetimeEvaluator[field=" + read + "]", + DataType.DATE_NANOS, + 0, + DateUtils.MAX_NANOSECOND_INSTANT.toEpochMilli(), + i -> DateUtils.toNanoSeconds(i.toEpochMilli()), + List.of() + ); + TestCaseSupplier.forUnaryLong( + suppliers, + "ToDateNanosFromLongEvaluator[field=" + read + "]", + DataType.DATE_NANOS, + l -> l, + 0, + Long.MAX_VALUE, + List.of() + ); + TestCaseSupplier.forUnaryLong( + suppliers, + "ToDateNanosFromLongEvaluator[field=" + read + "]", + DataType.DATE_NANOS, + l -> null, + Long.MIN_VALUE, + -1L, + List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: java.lang.IllegalArgumentException: Nanosecond dates before 1970-01-01T00:00:00.000Z are not supported." + ) + ); + TestCaseSupplier.forUnaryUnsignedLong( + suppliers, + "ToLongFromUnsignedLongEvaluator[field=" + read + "]", + DataType.DATE_NANOS, + BigInteger::longValueExact, + BigInteger.ZERO, + BigInteger.valueOf(Long.MAX_VALUE), + List.of() + ); + TestCaseSupplier.forUnaryUnsignedLong( + suppliers, + "ToLongFromUnsignedLongEvaluator[field=" + read + "]", + DataType.DATE_NANOS, + bi -> null, + BigInteger.valueOf(Long.MAX_VALUE).add(BigInteger.TWO), + UNSIGNED_LONG_MAX, + bi -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + bi + "] out of [long] range" + ) + ); + TestCaseSupplier.forUnaryDouble( + suppliers, + "ToDateNanosFromDoubleEvaluator[field=" + read + "]", + DataType.DATE_NANOS, + d -> null, + Double.NEGATIVE_INFINITY, + -Double.MIN_VALUE, + d -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: java.lang.IllegalArgumentException: Nanosecond dates before 1970-01-01T00:00:00.000Z are not supported." + ) + ); + TestCaseSupplier.forUnaryDouble( + suppliers, + "ToDateNanosFromDoubleEvaluator[field=" + read + "]", + DataType.DATE_NANOS, + d -> null, + 9.223372036854777E18, // a "convenient" value larger than `(double) Long.MAX_VALUE` (== ...776E18) + Double.POSITIVE_INFINITY, + d -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null.
Only first 20 failures recorded.", + "Line -1:-1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + d + "] out of [long] range" + ) + ); + TestCaseSupplier.forUnaryStrings( + suppliers, + "ToDateNanosFromStringEvaluator[field=" + read + "]", + DataType.DATE_NANOS, + bytesRef -> null, + bytesRef -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: java.lang.IllegalArgumentException: " + + (bytesRef.utf8ToString().isEmpty() + ? "cannot parse empty datetime" + : ("failed to parse date field [" + bytesRef.utf8ToString() + "] with format [strict_date_optional_time_nanos]")) + ) + ); + return parameterSuppliersFromTypedDataWithDefaultChecks( + true, + suppliers, + (v, p) -> "date_nanos or datetime or double or long or string or unsigned_long" + ); + } + + @Override + protected Expression build(Source source, List args) { + return new ToDateNanos(source, args.get(0)); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/TestPhysicalPlanOptimizer.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/TestPhysicalPlanOptimizer.java index 9c8886dbf0b6e..e26779e075b68 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/TestPhysicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/TestPhysicalPlanOptimizer.java @@ -7,8 +7,8 @@ package org.elasticsearch.xpack.esql.optimizer; -import org.elasticsearch.xpack.esql.core.rule.RuleExecutor; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; +import org.elasticsearch.xpack.esql.rule.RuleExecutor; public class TestPhysicalPlanOptimizer extends PhysicalPlanOptimizer { diff --git a/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/rest/action/RestFreezeIndexAction.java b/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/rest/action/RestFreezeIndexAction.java index 0daf2d8a1ebf5..f3e57d61d1b36 100644 --- a/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/rest/action/RestFreezeIndexAction.java +++ b/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/rest/action/RestFreezeIndexAction.java @@ -13,6 +13,7 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.protocol.xpack.frozen.FreezeRequest; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -37,11 +38,17 @@ public final class RestFreezeIndexAction extends BaseRestHandler { private static final String UNFREEZE_DEPRECATED = "Frozen indices are deprecated because they provide no benefit given improvements " + "in heap memory utilization. They will be removed in a future release."; + @UpdateForV9 + // these routes were ".deprecated" in RestApiVersion.V_8 which will require use of REST API compatibility headers to access + // this API in v9. It is unclear if this was intentional for v9, and the code has been updated to ".deprecateAndKeep" which will + // continue to emit deprecations warnings but will not require any special headers to access the API in v9. + // Please review and update the code and tests as needed. The original code remains commented out below for reference. 
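The user-visible effect of swapping .deprecated(..., RestApiVersion.V_8) for .deprecateAndKeep(...) on the _unfreeze route, per the comment above: a v9 client no longer needs REST API compatibility headers; the call simply succeeds and carries a deprecation warning. A minimal sketch with the low-level REST client, assuming a local cluster on localhost:9200 and an index named my-index (both illustrative):

    import org.apache.http.HttpHost;
    import org.elasticsearch.client.Request;
    import org.elasticsearch.client.Response;
    import org.elasticsearch.client.RestClient;

    public class UnfreezeWarningDemo {
        public static void main(String[] args) throws Exception {
            try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
                // No "compatible-with=8" Accept/Content-Type headers needed once the route
                // is registered with deprecateAndKeep: a plain v9 request returns 200 OK.
                Response response = client.performRequest(new Request("POST", "/my-index/_unfreeze"));
                // The deprecation text (UNFREEZE_DEPRECATED above) arrives as a Warning header.
                System.out.println(response.getWarnings());
            }
        }
    }
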
@Override public List routes() { return List.of( Route.builder(POST, "/{index}/_freeze").deprecated(FREEZE_REMOVED, RestApiVersion.V_7).build(), - Route.builder(POST, "/{index}/_unfreeze").deprecated(UNFREEZE_DEPRECATED, RestApiVersion.V_8).build() + // Route.builder(POST, "/{index}/_unfreeze").deprecated(UNFREEZE_DEPRECATED, RestApiVersion.V_8).build() + Route.builder(POST, "/{index}/_unfreeze").deprecateAndKeep(UNFREEZE_DEPRECATED).build() ); } diff --git a/x-pack/plugin/graph/src/test/java/org/elasticsearch/xpack/graph/rest/action/RestGraphActionTests.java b/x-pack/plugin/graph/src/test/java/org/elasticsearch/xpack/graph/rest/action/RestGraphActionTests.java deleted file mode 100644 index 4961efd7253ec..0000000000000 --- a/x-pack/plugin/graph/src/test/java/org/elasticsearch/xpack/graph/rest/action/RestGraphActionTests.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.graph.rest.action; - -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.core.RestApiVersion; -import org.elasticsearch.core.Tuple; -import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest; -import org.elasticsearch.protocol.xpack.graph.GraphExploreResponse; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.test.rest.FakeRestRequest; -import org.elasticsearch.test.rest.RestActionTestCase; -import org.elasticsearch.xcontent.XContentType; -import org.junit.Before; -import org.mockito.Mockito; - -import java.util.Collections; -import java.util.List; -import java.util.Map; - -import static org.hamcrest.Matchers.instanceOf; - -public final class RestGraphActionTests extends RestActionTestCase { - private final List compatibleMediaType = Collections.singletonList(randomCompatibleMediaType(RestApiVersion.V_7)); - - @Before - public void setUpAction() { - controller().registerHandler(new RestGraphAction()); - verifyingClient.setExecuteVerifier((actionType, request) -> { - assertThat(request, instanceOf(GraphExploreRequest.class)); - return Mockito.mock(GraphExploreResponse.class); - }); - } - - public void testTypeInPath() { - for (Tuple methodAndPath : List.of( - Tuple.tuple(RestRequest.Method.GET, "/some_index/some_type/_graph/explore"), - Tuple.tuple(RestRequest.Method.POST, "/some_index/some_type/_graph/explore"), - Tuple.tuple(RestRequest.Method.GET, "/some_index/some_type/_xpack/graph/_explore"), - Tuple.tuple(RestRequest.Method.POST, "/some_index/some_type/_xpack/graph/_explore") - )) { - - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withHeaders( - Map.of( - "Accept", - compatibleMediaType, - "Content-Type", - Collections.singletonList(compatibleMediaType(XContentType.VND_JSON, RestApiVersion.V_7)) - ) - ).withMethod(methodAndPath.v1()).withPath(methodAndPath.v2()).withContent(new BytesArray("{}"), null).build(); - - dispatchRequest(request); - assertCriticalWarnings(RestGraphAction.TYPES_DEPRECATION_MESSAGE); - } - } -} diff --git a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/HuggingFaceServiceMixedIT.java b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/HuggingFaceServiceMixedIT.java index a2793f9060d8a..59d3faf6489a6 100644 --- 
a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/HuggingFaceServiceMixedIT.java +++ b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/HuggingFaceServiceMixedIT.java @@ -84,6 +84,7 @@ public void testElser() throws IOException { final String inferenceId = "mixed-cluster-elser"; final String upgradedClusterId = "upgraded-cluster-elser"; + elserServer.enqueue(new MockResponse().setResponseCode(200).setBody(elserResponse())); put(inferenceId, elserConfig(getUrl(elserServer)), TaskType.SPARSE_EMBEDDING); var configs = (List>) get(TaskType.SPARSE_EMBEDDING, inferenceId).get("endpoints"); diff --git a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/HuggingFaceServiceUpgradeIT.java b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/HuggingFaceServiceUpgradeIT.java index 36ee472cc0a13..9c9a377bbb001 100644 --- a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/HuggingFaceServiceUpgradeIT.java +++ b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/HuggingFaceServiceUpgradeIT.java @@ -117,6 +117,7 @@ public void testElser() throws IOException { var testTaskType = TaskType.SPARSE_EMBEDDING; if (isOldCluster()) { + elserServer.enqueue(new MockResponse().setResponseCode(200).setBody(elserResponse())); put(oldClusterId, elserConfig(getUrl(elserServer)), testTaskType); var configs = (List>) get(testTaskType, oldClusterId).get(old_cluster_endpoint_identifier); assertThat(configs, hasSize(1)); @@ -136,6 +137,7 @@ public void testElser() throws IOException { assertElser(oldClusterId); // New endpoint + elserServer.enqueue(new MockResponse().setResponseCode(200).setBody(elserResponse())); put(upgradedClusterId, elserConfig(getUrl(elserServer)), testTaskType); configs = (List>) get(upgradedClusterId).get("endpoints"); assertThat(configs, hasSize(1)); diff --git a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java index 10d8f90efef5b..daa29d33699ef 100644 --- a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java +++ b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java @@ -94,6 +94,7 @@ public void infer( Model model, @Nullable String query, List input, + boolean stream, Map taskSettings, InputType inputType, TimeValue timeout, diff --git a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestRerankingServiceExtension.java b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestRerankingServiceExtension.java index fae11d5b53ca3..1894db6db8df6 100644 --- a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestRerankingServiceExtension.java +++ b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestRerankingServiceExtension.java @@ -85,6 +85,7 @@ public void infer( Model model, @Nullable String 
query, List input, + boolean stream, Map taskSettings, InputType inputType, TimeValue timeout, diff --git a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestSparseInferenceServiceExtension.java b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestSparseInferenceServiceExtension.java index fee9855b188c2..1a5df146a3aa4 100644 --- a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestSparseInferenceServiceExtension.java +++ b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestSparseInferenceServiceExtension.java @@ -88,6 +88,7 @@ public void infer( Model model, @Nullable String query, List input, + boolean stream, Map taskSettings, InputType inputType, TimeValue timeout, diff --git a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestStreamingCompletionServiceExtension.java b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestStreamingCompletionServiceExtension.java index 3d72b1f2729b0..4313026e92521 100644 --- a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestStreamingCompletionServiceExtension.java +++ b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestStreamingCompletionServiceExtension.java @@ -85,6 +85,7 @@ public void infer( Model model, String query, List input, + boolean stream, Map taskSettings, InputType inputType, TimeValue timeout, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java index fd330a8cf6cc6..30ccb48d5c709 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java @@ -10,6 +10,7 @@ import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.xpack.inference.mapper.SemanticTextFieldMapper; +import org.elasticsearch.xpack.inference.queries.SemanticQueryBuilder; import org.elasticsearch.xpack.inference.rank.random.RandomRankRetrieverBuilder; import org.elasticsearch.xpack.inference.rank.textsimilarity.TextSimilarityRankRetrieverBuilder; @@ -25,7 +26,8 @@ public Set getFeatures() { return Set.of( TextSimilarityRankRetrieverBuilder.TEXT_SIMILARITY_RERANKER_RETRIEVER_SUPPORTED, RandomRankRetrieverBuilder.RANDOM_RERANKER_RETRIEVER_SUPPORTED, - SemanticTextFieldMapper.SEMANTIC_TEXT_SEARCH_INFERENCE_ID + SemanticTextFieldMapper.SEMANTIC_TEXT_SEARCH_INFERENCE_ID, + SemanticQueryBuilder.SEMANTIC_TEXT_INNER_HITS ); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java index 803e8f1e07612..4186b281a35b5 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java @@ -114,6 +114,7 @@ private void inferOnService( model, request.getQuery(), 
request.getInput(), + request.isStreaming(), request.getTaskSettings(), request.getInputType(), request.getInferenceTimeout(), diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpUtils.java index 9f2ceddc92a2e..4282e5d1e7cb9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpUtils.java @@ -46,7 +46,7 @@ private static String getStatusCodeErrorMessage(Request request, HttpResult resu } public static void checkForEmptyBody(ThrottlerManager throttlerManager, Logger logger, Request request, HttpResult result) { - if (result.isBodyEmpty()) { + if (result.isBodyEmpty() && (request.isStreaming() == false)) { String message = format("Response body was empty for request from inference entity id [%s]", request.getInferenceEntityId()); throttlerManager.warn(logger, message); throw new IllegalStateException(message); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/DocumentsOnlyInput.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/DocumentsOnlyInput.java index a32e2018117f8..8cf411d84c932 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/DocumentsOnlyInput.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/DocumentsOnlyInput.java @@ -21,13 +21,23 @@ public static DocumentsOnlyInput of(InferenceInputs inferenceInputs) { } private final List input; + private final boolean stream; - public DocumentsOnlyInput(List chunks) { + public DocumentsOnlyInput(List input) { + this(input, false); + } + + public DocumentsOnlyInput(List input, boolean stream) { super(); - this.input = Objects.requireNonNull(chunks); + this.input = Objects.requireNonNull(input); + this.stream = stream; } public List getInputs() { return this.input; } + + public boolean stream() { + return stream; + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/QueryAndDocsInputs.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/QueryAndDocsInputs.java index 0d5f98c180ba9..50bb77b307db3 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/QueryAndDocsInputs.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/QueryAndDocsInputs.java @@ -21,6 +21,19 @@ public static QueryAndDocsInputs of(InferenceInputs inferenceInputs) { } private final String query; + private final List chunks; + private final boolean stream; + + public QueryAndDocsInputs(String query, List chunks) { + this(query, chunks, false); + } + + public QueryAndDocsInputs(String query, List chunks, boolean stream) { + super(); + this.query = Objects.requireNonNull(query); + this.chunks = Objects.requireNonNull(chunks); + this.stream = stream; + } public String getQuery() { return query; @@ -30,12 +43,8 @@ public List getChunks() { return chunks; } - List chunks; - - public QueryAndDocsInputs(String query, List chunks) { - super(); - this.query = Objects.requireNonNull(query); - this.chunks = Objects.requireNonNull(chunks); + public boolean stream() { + return 
stream; } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java index 0483296cd2c6a..e0ad044f597ab 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java @@ -40,6 +40,7 @@ import org.elasticsearch.index.mapper.ValueFetcher; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.index.mapper.vectors.SparseVectorFieldMapper; +import org.elasticsearch.index.query.InnerHitBuilder; import org.elasticsearch.index.query.MatchNoneQueryBuilder; import org.elasticsearch.index.query.NestedQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; @@ -54,6 +55,7 @@ import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; +import org.elasticsearch.xpack.inference.queries.SemanticQueryInnerHitBuilder; import java.io.IOException; import java.util.ArrayList; @@ -468,7 +470,12 @@ public boolean fieldHasValue(FieldInfos fieldInfos) { return fieldInfos.fieldInfo(getEmbeddingsFieldName(name())) != null; } - public QueryBuilder semanticQuery(InferenceResults inferenceResults, float boost, String queryName) { + public QueryBuilder semanticQuery( + InferenceResults inferenceResults, + float boost, + String queryName, + SemanticQueryInnerHitBuilder semanticInnerHitBuilder + ) { String nestedFieldPath = getChunksFieldName(name()); String inferenceResultsFieldName = getEmbeddingsFieldName(name()); QueryBuilder childQueryBuilder; @@ -524,7 +531,10 @@ public QueryBuilder semanticQuery(InferenceResults inferenceResults, float boost }; } - return new NestedQueryBuilder(nestedFieldPath, childQueryBuilder, ScoreMode.Max).boost(boost).queryName(queryName); + InnerHitBuilder innerHitBuilder = semanticInnerHitBuilder != null ? 
semanticInnerHitBuilder.toInnerHitBuilder() : null; + return new NestedQueryBuilder(nestedFieldPath, childQueryBuilder, ScoreMode.Max).boost(boost) + .queryName(queryName) + .innerHit(innerHitBuilder); } private String generateQueryInferenceResultsTypeMismatchMessage(InferenceResults inferenceResults, String expectedResultsType) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java index 9f7fcb1ef407c..901de30145f7d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java @@ -16,6 +16,8 @@ import org.elasticsearch.cluster.metadata.InferenceFieldMetadata; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.query.AbstractQueryBuilder; import org.elasticsearch.index.query.MatchNoneQueryBuilder; @@ -44,35 +46,46 @@ import java.util.Map; import java.util.Objects; +import static org.elasticsearch.TransportVersions.SEMANTIC_QUERY_INNER_HITS; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; public class SemanticQueryBuilder extends AbstractQueryBuilder { + public static final NodeFeature SEMANTIC_TEXT_INNER_HITS = new NodeFeature("semantic_text.inner_hits"); + public static final String NAME = "semantic"; private static final ParseField FIELD_FIELD = new ParseField("field"); private static final ParseField QUERY_FIELD = new ParseField("query"); + private static final ParseField INNER_HITS_FIELD = new ParseField("inner_hits"); private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( NAME, false, - args -> new SemanticQueryBuilder((String) args[0], (String) args[1]) + args -> new SemanticQueryBuilder((String) args[0], (String) args[1], (SemanticQueryInnerHitBuilder) args[2]) ); static { PARSER.declareString(constructorArg(), FIELD_FIELD); PARSER.declareString(constructorArg(), QUERY_FIELD); + PARSER.declareObject(optionalConstructorArg(), (p, c) -> SemanticQueryInnerHitBuilder.fromXContent(p), INNER_HITS_FIELD); declareStandardFields(PARSER); } private final String fieldName; private final String query; + private final SemanticQueryInnerHitBuilder innerHitBuilder; private final SetOnce inferenceResultsSupplier; private final InferenceResults inferenceResults; private final boolean noInferenceResults; public SemanticQueryBuilder(String fieldName, String query) { + this(fieldName, query, null); + } + + public SemanticQueryBuilder(String fieldName, String query, @Nullable SemanticQueryInnerHitBuilder innerHitBuilder) { if (fieldName == null) { throw new IllegalArgumentException("[" + NAME + "] requires a " + FIELD_FIELD.getPreferredName() + " value"); } @@ -81,15 +94,25 @@ public SemanticQueryBuilder(String fieldName, String query) { } this.fieldName = fieldName; this.query = query; + this.innerHitBuilder = innerHitBuilder; 
this.inferenceResults = null; this.inferenceResultsSupplier = null; this.noInferenceResults = false; + + if (this.innerHitBuilder != null) { + this.innerHitBuilder.setFieldName(fieldName); + } } public SemanticQueryBuilder(StreamInput in) throws IOException { super(in); this.fieldName = in.readString(); this.query = in.readString(); + if (in.getTransportVersion().onOrAfter(SEMANTIC_QUERY_INNER_HITS)) { + this.innerHitBuilder = in.readOptionalWriteable(SemanticQueryInnerHitBuilder::new); + } else { + this.innerHitBuilder = null; + } this.inferenceResults = in.readOptionalNamedWriteable(InferenceResults.class); this.noInferenceResults = in.readBoolean(); this.inferenceResultsSupplier = null; @@ -102,6 +125,21 @@ protected void doWriteTo(StreamOutput out) throws IOException { } out.writeString(fieldName); out.writeString(query); + if (out.getTransportVersion().onOrAfter(SEMANTIC_QUERY_INNER_HITS)) { + out.writeOptionalWriteable(innerHitBuilder); + } else if (innerHitBuilder != null) { + throw new IllegalStateException( + "Transport version must be at least [" + + SEMANTIC_QUERY_INNER_HITS.toReleaseVersion() + + "] to use [ " + + INNER_HITS_FIELD.getPreferredName() + + "] in [" + + NAME + + "], current transport version is [" + + out.getTransportVersion().toReleaseVersion() + + "]. Are you running a mixed-version cluster?" + ); + } out.writeOptionalNamedWriteable(inferenceResults); out.writeBoolean(noInferenceResults); } @@ -114,6 +152,7 @@ private SemanticQueryBuilder( ) { this.fieldName = other.fieldName; this.query = other.query; + this.innerHitBuilder = other.innerHitBuilder; this.boost = other.boost; this.queryName = other.queryName; this.inferenceResultsSupplier = inferenceResultsSupplier; @@ -121,6 +160,10 @@ private SemanticQueryBuilder( this.noInferenceResults = noInferenceResults; } + public SemanticQueryInnerHitBuilder innerHit() { + return innerHitBuilder; + } + @Override public String getWriteableName() { return NAME; @@ -140,6 +183,9 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep builder.startObject(NAME); builder.field(FIELD_FIELD.getPreferredName(), fieldName); builder.field(QUERY_FIELD.getPreferredName(), query); + if (innerHitBuilder != null) { + builder.field(INNER_HITS_FIELD.getPreferredName(), innerHitBuilder); + } boostAndQueryNameToXContent(builder); builder.endObject(); } @@ -166,7 +212,7 @@ private QueryBuilder doRewriteBuildSemanticQuery(SearchExecutionContext searchEx ); } - return semanticTextFieldType.semanticQuery(inferenceResults, boost(), queryName()); + return semanticTextFieldType.semanticQuery(inferenceResults, boost(), queryName(), innerHitBuilder); } else { throw new IllegalArgumentException( "Field [" + fieldName + "] of type [" + fieldType.typeName() + "] does not support " + NAME + " queries" @@ -301,11 +347,12 @@ private static String getInferenceIdForForField(Collection indexM protected boolean doEquals(SemanticQueryBuilder other) { return Objects.equals(fieldName, other.fieldName) && Objects.equals(query, other.query) + && Objects.equals(innerHitBuilder, other.innerHitBuilder) && Objects.equals(inferenceResults, other.inferenceResults); } @Override protected int doHashCode() { - return Objects.hash(fieldName, query, inferenceResults); + return Objects.hash(fieldName, query, innerHitBuilder, inferenceResults); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryInnerHitBuilder.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryInnerHitBuilder.java new file mode 100644 index 0000000000000..776ce990665ac --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryInnerHitBuilder.java @@ -0,0 +1,132 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.queries; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.index.query.InnerHitBuilder; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.fetch.subphase.FetchSourceContext; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.inference.mapper.SemanticTextField; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.index.query.InnerHitBuilder.DEFAULT_FROM; +import static org.elasticsearch.index.query.InnerHitBuilder.DEFAULT_SIZE; + +public class SemanticQueryInnerHitBuilder implements Writeable, ToXContentObject { + private static final ObjectParser PARSER = new ObjectParser<>( + "semantic_query_inner_hits", + SemanticQueryInnerHitBuilder::new + ); + + static { + PARSER.declareInt(SemanticQueryInnerHitBuilder::setFrom, SearchSourceBuilder.FROM_FIELD); + PARSER.declareInt(SemanticQueryInnerHitBuilder::setSize, SearchSourceBuilder.SIZE_FIELD); + } + + private String fieldName; + private int from = DEFAULT_FROM; + private int size = DEFAULT_SIZE; + + public SemanticQueryInnerHitBuilder() { + this.fieldName = null; + } + + public SemanticQueryInnerHitBuilder(StreamInput in) throws IOException { + fieldName = in.readOptionalString(); + from = in.readVInt(); + size = in.readVInt(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalString(fieldName); + out.writeVInt(from); + out.writeVInt(size); + } + + public String getFieldName() { + return fieldName; + } + + public void setFieldName(String fieldName) { + this.fieldName = fieldName; + } + + public int getFrom() { + return from; + } + + public SemanticQueryInnerHitBuilder setFrom(int from) { + this.from = from; + return this; + } + + public int getSize() { + return size; + } + + public SemanticQueryInnerHitBuilder setSize(int size) { + this.size = size; + return this; + } + + public InnerHitBuilder toInnerHitBuilder() { + if (fieldName == null) { + throw new IllegalStateException("fieldName must have a value"); + } + + return new InnerHitBuilder(fieldName).setFrom(from) + .setSize(size) + .setFetchSourceContext(FetchSourceContext.of(true, null, new String[] { SemanticTextField.getEmbeddingsFieldName(fieldName) })); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + // Don't include name in XContent because it is hard-coded + builder.startObject(); + if (from != DEFAULT_FROM) { + builder.field(SearchSourceBuilder.FROM_FIELD.getPreferredName(), from); + } + if (size != 
DEFAULT_SIZE) { + builder.field(SearchSourceBuilder.SIZE_FIELD.getPreferredName(), size); + } + builder.endObject(); + return builder; + } + + public static SemanticQueryInnerHitBuilder fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, new SemanticQueryInnerHitBuilder(), null); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SemanticQueryInnerHitBuilder that = (SemanticQueryInnerHitBuilder) o; + return from == that.from && size == that.size && Objects.equals(fieldName, that.fieldName); + } + + @Override + public int hashCode() { + return Objects.hash(fieldName, from, size); + } + + @Override + public String toString() { + return Strings.toString(this, true, true); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java index 864aebcef124f..21b2df6af1ab6 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java @@ -51,6 +51,7 @@ public void infer( Model model, @Nullable String query, List input, + boolean stream, Map taskSettings, InputType inputType, TimeValue timeout, @@ -58,9 +59,9 @@ public void infer( ) { init(); if (query != null) { - doInfer(model, new QueryAndDocsInputs(query, input), taskSettings, inputType, timeout, listener); + doInfer(model, new QueryAndDocsInputs(query, input, stream), taskSettings, inputType, timeout, listener); } else { - doInfer(model, new DocumentsOnlyInput(input), taskSettings, inputType, timeout, listener); + doInfer(model, new DocumentsOnlyInput(input, stream), taskSettings, inputType, timeout, listener); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java index 6c4904f8918a7..32c1d17373e53 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java @@ -202,6 +202,13 @@ public static ElasticsearchStatusException unknownSettingsError(Map invalidModelType) { + throw new ElasticsearchStatusException( + Strings.format("Can't update embedding details for model with unexpected type %s", invalidModelType), + RestStatus.BAD_REQUEST + ); + } + public static String missingSettingErrorMsg(String settingName, String scope) { return Strings.format("[%s] does not contain the required setting [%s]", scope, settingName); } @@ -659,6 +666,7 @@ public static void getEmbeddingSize(Model model, InferenceService service, Actio model, null, List.of(TEST_EMBEDDING_INPUT), + false, Map.of(), InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchService.java index 8f0c9896c6642..994bad194aef6 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchService.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchService.java @@ -309,6 +309,7 @@ private void checkAlibabaCloudSearchServiceConfig(Model model, InferenceService model, query, List.of(input), + false, Map.of(), InputType.INGEST, DEFAULT_TIMEOUT, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java index cca8ae63e974c..93408c067098b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java @@ -323,6 +323,7 @@ public void infer( Model model, @Nullable String query, List input, + boolean stream, Map taskSettings, InputType inputType, TimeValue timeout, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalService.java index 948117954a63f..746cb6e89fad0 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalService.java @@ -149,6 +149,7 @@ public void infer( Model model, @Nullable String query, List inputs, + boolean stream, Map taskSettings, InputType inputType, TimeValue timeout, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioService.java index 08eb67ca744a4..422fc5b0ed720 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioService.java @@ -35,6 +35,7 @@ import org.elasticsearch.xpack.inference.services.googleaistudio.completion.GoogleAiStudioCompletionModel; import org.elasticsearch.xpack.inference.services.googleaistudio.embeddings.GoogleAiStudioEmbeddingsModel; import org.elasticsearch.xpack.inference.services.googleaistudio.embeddings.GoogleAiStudioEmbeddingsServiceSettings; +import org.elasticsearch.xpack.inference.services.validation.ModelValidatorBuilder; import java.util.List; import java.util.Map; @@ -187,30 +188,29 @@ public TransportVersion getMinimalSupportedVersion() { @Override public void checkModelConfig(Model model, ActionListener listener) { - if (model instanceof GoogleAiStudioEmbeddingsModel embeddingsModel) { - ServiceUtils.getEmbeddingSize( - model, - this, - listener.delegateFailureAndWrap((l, size) -> l.onResponse(updateModelWithEmbeddingDetails(embeddingsModel, size))) - ); - } else { - listener.onResponse(model); - } + // TODO: Remove this function once all services have been updated to use the new model validators + ModelValidatorBuilder.buildModelValidator(model.getTaskType()).validate(this, model, listener); } - private GoogleAiStudioEmbeddingsModel updateModelWithEmbeddingDetails(GoogleAiStudioEmbeddingsModel model, int 
embeddingSize) { - var similarityFromModel = model.getServiceSettings().similarity(); - var similarityToUse = similarityFromModel == null ? SimilarityMeasure.DOT_PRODUCT : similarityFromModel; + @Override + public Model updateModelWithEmbeddingDetails(Model model, int embeddingSize) { + if (model instanceof GoogleAiStudioEmbeddingsModel embeddingsModel) { + var serviceSettings = embeddingsModel.getServiceSettings(); + var similarityFromModel = serviceSettings.similarity(); + var similarityToUse = similarityFromModel == null ? SimilarityMeasure.DOT_PRODUCT : similarityFromModel; - GoogleAiStudioEmbeddingsServiceSettings serviceSettings = new GoogleAiStudioEmbeddingsServiceSettings( - model.getServiceSettings().modelId(), - model.getServiceSettings().maxInputTokens(), - embeddingSize, - similarityToUse, - model.getServiceSettings().rateLimitSettings() - ); + var updatedServiceSettings = new GoogleAiStudioEmbeddingsServiceSettings( + serviceSettings.modelId(), + serviceSettings.maxInputTokens(), + embeddingSize, + similarityToUse, + serviceSettings.rateLimitSettings() + ); - return new GoogleAiStudioEmbeddingsModel(model, serviceSettings); + return new GoogleAiStudioEmbeddingsModel(embeddingsModel, updatedServiceSettings); + } else { + throw ServiceUtils.invalidModelTypeForUpdateModelWithEmbeddingDetails(model.getClass()); + } } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java index bdfa87e77b708..6b142edca80aa 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java @@ -29,6 +29,7 @@ import org.elasticsearch.xpack.inference.services.ServiceUtils; import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserModel; import org.elasticsearch.xpack.inference.services.huggingface.embeddings.HuggingFaceEmbeddingsModel; +import org.elasticsearch.xpack.inference.services.validation.ModelValidatorBuilder; import java.util.List; import java.util.Map; @@ -67,34 +68,31 @@ protected HuggingFaceModel createModel( @Override public void checkModelConfig(Model model, ActionListener listener) { + // TODO: Remove this function once all services have been updated to use the new model validators + ModelValidatorBuilder.buildModelValidator(model.getTaskType()).validate(this, model, listener); + } + + @Override + public Model updateModelWithEmbeddingDetails(Model model, int embeddingSize) { if (model instanceof HuggingFaceEmbeddingsModel embeddingsModel) { - ServiceUtils.getEmbeddingSize( - model, - this, - listener.delegateFailureAndWrap((l, size) -> l.onResponse(updateModelWithEmbeddingDetails(embeddingsModel, size))) + var serviceSettings = embeddingsModel.getServiceSettings(); + var similarityFromModel = serviceSettings.similarity(); + var similarityToUse = similarityFromModel == null ? 
SimilarityMeasure.COSINE : similarityFromModel; + + var updatedServiceSettings = new HuggingFaceServiceSettings( + serviceSettings.uri(), + similarityToUse, + embeddingSize, + embeddingsModel.getTokenLimit(), + serviceSettings.rateLimitSettings() ); + + return new HuggingFaceEmbeddingsModel(embeddingsModel, updatedServiceSettings); } else { - listener.onResponse(model); + throw ServiceUtils.invalidModelTypeForUpdateModelWithEmbeddingDetails(model.getClass()); } } - private static HuggingFaceEmbeddingsModel updateModelWithEmbeddingDetails(HuggingFaceEmbeddingsModel model, int embeddingSize) { - // default to cosine similarity - var similarity = model.getServiceSettings().similarity() == null - ? SimilarityMeasure.COSINE - : model.getServiceSettings().similarity(); - - var serviceSettings = new HuggingFaceServiceSettings( - model.getServiceSettings().uri(), - similarity, - embeddingSize, - model.getTokenLimit(), - model.getServiceSettings().rateLimitSettings() - ); - - return new HuggingFaceEmbeddingsModel(model, serviceSettings); - } - @Override protected void doChunkedInfer( Model model, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/mistral/MistralService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/mistral/MistralService.java index 1acc13f50778b..221951f7a621e 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/mistral/MistralService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/mistral/MistralService.java @@ -33,6 +33,7 @@ import org.elasticsearch.xpack.inference.services.ServiceUtils; import org.elasticsearch.xpack.inference.services.mistral.embeddings.MistralEmbeddingsModel; import org.elasticsearch.xpack.inference.services.mistral.embeddings.MistralEmbeddingsServiceSettings; +import org.elasticsearch.xpack.inference.services.validation.ModelValidatorBuilder; import java.util.List; import java.util.Map; @@ -214,32 +215,28 @@ private MistralEmbeddingsModel createModelFromPersistent( @Override public void checkModelConfig(Model model, ActionListener listener) { - if (model instanceof MistralEmbeddingsModel embeddingsModel) { - ServiceUtils.getEmbeddingSize( - model, - this, - listener.delegateFailureAndWrap((l, size) -> l.onResponse(updateEmbeddingModelConfig(embeddingsModel, size))) - ); - } else { - listener.onResponse(model); - } + // TODO: Remove this function once all services have been updated to use the new model validators + ModelValidatorBuilder.buildModelValidator(model.getTaskType()).validate(this, model, listener); } - private MistralEmbeddingsModel updateEmbeddingModelConfig(MistralEmbeddingsModel embeddingsModel, int embeddingsSize) { - var embeddingServiceSettings = embeddingsModel.getServiceSettings(); - - var similarityFromModel = embeddingsModel.getServiceSettings().similarity(); - var similarityToUse = similarityFromModel == null ? 
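// Worth noting across these hunks: when no similarity is configured, each provider keeps its own default — Hugging Face falls back to SimilarityMeasure.COSINE, while Google AI Studio, Mistral, and OpenAI (below) fall back to SimilarityMeasure.DOT_PRODUCT — so the refactor preserves the old per-service behaviour rather than unifying the default.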
SimilarityMeasure.DOT_PRODUCT : similarityFromModel; + @Override + public Model updateModelWithEmbeddingDetails(Model model, int embeddingSize) { + if (model instanceof MistralEmbeddingsModel embeddingsModel) { + var serviceSettings = embeddingsModel.getServiceSettings(); - MistralEmbeddingsServiceSettings serviceSettings = new MistralEmbeddingsServiceSettings( - embeddingServiceSettings.modelId(), - embeddingsSize, - embeddingServiceSettings.maxInputTokens(), - similarityToUse, - embeddingServiceSettings.rateLimitSettings() - ); + var similarityFromModel = embeddingsModel.getServiceSettings().similarity(); + var similarityToUse = similarityFromModel == null ? SimilarityMeasure.DOT_PRODUCT : similarityFromModel; - return new MistralEmbeddingsModel(embeddingsModel, serviceSettings); + MistralEmbeddingsServiceSettings updatedServiceSettings = new MistralEmbeddingsServiceSettings( + serviceSettings.modelId(), + embeddingSize, + serviceSettings.maxInputTokens(), + similarityToUse, + serviceSettings.rateLimitSettings() + ); + return new MistralEmbeddingsModel(embeddingsModel, updatedServiceSettings); + } else { + throw ServiceUtils.invalidModelTypeForUpdateModelWithEmbeddingDetails(model.getClass()); + } } - } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java index 7cea1ec7df46c..f9565a915124f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java @@ -12,7 +12,6 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.Strings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.ChunkingOptions; @@ -35,6 +34,7 @@ import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.SenderService; import org.elasticsearch.xpack.inference.services.ServiceComponents; +import org.elasticsearch.xpack.inference.services.ServiceUtils; import org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModel; import org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsModel; import org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsServiceSettings; @@ -307,10 +307,7 @@ public Model updateModelWithEmbeddingDetails(Model model, int embeddingSize) { return new OpenAiEmbeddingsModel(embeddingsModel, updatedServiceSettings); } else { - throw new ElasticsearchStatusException( - Strings.format("Can't update embedding details for model with unexpected type %s", model.getClass()), - RestStatus.BAD_REQUEST - ); + throw ServiceUtils.invalidModelTypeForUpdateModelWithEmbeddingDetails(model.getClass()); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/validation/SimpleServiceIntegrationValidator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/validation/SimpleServiceIntegrationValidator.java index 9fc5748746085..70f01e77b9369 100644 --- 
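// The OpenAiService hunk above is the counterpart cleanup: its inline ElasticsearchStatusException is replaced by the shared ServiceUtils.invalidModelTypeForUpdateModelWithEmbeddingDetails(model.getClass()) helper introduced earlier in this change, so every service now raises the same BAD_REQUEST error for an unexpected model type.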
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/validation/SimpleServiceIntegrationValidator.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/validation/SimpleServiceIntegrationValidator.java @@ -1,3 +1,4 @@ + /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License @@ -30,17 +31,29 @@ public void validate(InferenceService service, Model model, ActionListener { + ActionListener.wrap(r -> { if (r != null) { - delegate.onResponse(r); + listener.onResponse(r); } else { - delegate.onFailure( - new ElasticsearchStatusException("Could not make a validation call to the selected service", RestStatus.BAD_REQUEST) + listener.onFailure( + new ElasticsearchStatusException( + "Could not complete inference endpoint creation as validation call to service returned null response.", + RestStatus.BAD_REQUEST + ) ); } + }, e -> { + listener.onFailure( + new ElasticsearchStatusException( + "Could not complete inference endpoint creation as validation call to service threw an exception.", + RestStatus.BAD_REQUEST, + e + ) + ); }) ); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java index f54ce89183079..47ac33a5cf9ab 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java @@ -31,7 +31,9 @@ import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; +import org.elasticsearch.index.query.InnerHitContextBuilder; import org.elasticsearch.index.query.MatchNoneQueryBuilder; +import org.elasticsearch.index.query.NestedQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.index.query.SearchExecutionContext; @@ -62,7 +64,9 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.HashMap; import java.util.List; +import java.util.Map; import static org.apache.lucene.search.BooleanClause.Occur.FILTER; import static org.apache.lucene.search.BooleanClause.Occur.MUST; @@ -165,7 +169,14 @@ protected SemanticQueryBuilder doCreateTestQueryBuilder() { queryTokens.add(randomAlphaOfLength(QUERY_TOKEN_LENGTH)); } - SemanticQueryBuilder builder = new SemanticQueryBuilder(SEMANTIC_TEXT_FIELD, String.join(" ", queryTokens)); + SemanticQueryInnerHitBuilder innerHitBuilder = null; + if (randomBoolean()) { + innerHitBuilder = new SemanticQueryInnerHitBuilder(); + innerHitBuilder.setFrom(randomIntBetween(0, 100)); + innerHitBuilder.setSize(randomIntBetween(0, 100)); + } + + SemanticQueryBuilder builder = new SemanticQueryBuilder(SEMANTIC_TEXT_FIELD, String.join(" ", queryTokens), innerHitBuilder); if (randomBoolean()) { builder.boost((float) randomDoubleBetween(0.1, 10.0, true)); } @@ -190,6 +201,21 @@ protected void doAssertLuceneQuery(SemanticQueryBuilder queryBuilder, Query quer case SPARSE_EMBEDDING -> assertSparseEmbeddingLuceneQuery(nestedQuery.getChildQuery()); case TEXT_EMBEDDING -> 
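// When the randomized builder below includes an inner_hits clause, the semantic query is expected to rewrite to a NestedQueryBuilder over the per-chunk embeddings; the assertions that follow extract the registered InnerHitContextBuilder via InnerHitContextBuilder.extractInnerHits(...) and verify that the from/size settings survive the rewrite.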
assertTextEmbeddingLuceneQuery(nestedQuery.getChildQuery()); } + + if (queryBuilder.innerHit() != null) { + // Rewrite to a nested query + QueryBuilder rewrittenQueryBuilder = rewriteQuery(queryBuilder, createQueryRewriteContext(), createSearchExecutionContext()); + assertThat(rewrittenQueryBuilder, instanceOf(NestedQueryBuilder.class)); + + NestedQueryBuilder nestedQueryBuilder = (NestedQueryBuilder) rewrittenQueryBuilder; + Map innerHitInternals = new HashMap<>(); + InnerHitContextBuilder.extractInnerHits(nestedQueryBuilder, innerHitInternals); + assertThat(innerHitInternals.size(), equalTo(1)); + + InnerHitContextBuilder innerHits = innerHitInternals.get(queryBuilder.innerHit().getFieldName()); + assertNotNull(innerHits); + assertThat(innerHits.innerHitBuilder(), equalTo(queryBuilder.innerHit().toInnerHitBuilder())); + } } private void assertSparseEmbeddingLuceneQuery(Query query) { @@ -312,6 +338,20 @@ public void testToXContent() throws IOException { "query": "bar" } }""", queryBuilder); + + SemanticQueryInnerHitBuilder innerHitBuilder = new SemanticQueryInnerHitBuilder().setFrom(1).setSize(2); + queryBuilder = new SemanticQueryBuilder("foo", "bar", innerHitBuilder); + checkGeneratedJson(""" + { + "semantic": { + "field": "foo", + "query": "bar", + "inner_hits": { + "from": 1, + "size": 2 + } + } + }""", queryBuilder); } public void testSerializingQueryWhenNoInferenceId() throws IOException { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java index e5f0989b43976..ca48d5427d18b 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java @@ -49,6 +49,7 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -855,12 +856,11 @@ public void testGetEmbeddingSize_ReturnsError_WhenTextEmbeddingResults_IsEmpty() when(model.getTaskType()).thenReturn(TaskType.TEXT_EMBEDDING); doAnswer(invocation -> { - @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocation.getArguments()[6]; + ActionListener listener = invocation.getArgument(7); listener.onResponse(new InferenceTextEmbeddingFloatResults(List.of())); return Void.TYPE; - }).when(service).infer(any(), any(), any(), any(), any(), any(), any()); + }).when(service).infer(any(), any(), any(), anyBoolean(), any(), any(), any(), any()); PlainActionFuture listener = new PlainActionFuture<>(); getEmbeddingSize(model, service, listener); @@ -878,12 +878,11 @@ public void testGetEmbeddingSize_ReturnsError_WhenTextEmbeddingByteResults_IsEmp when(model.getTaskType()).thenReturn(TaskType.TEXT_EMBEDDING); doAnswer(invocation -> { - @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocation.getArguments()[6]; + ActionListener listener = invocation.getArgument(7); listener.onResponse(new InferenceTextEmbeddingByteResults(List.of())); return Void.TYPE; - }).when(service).infer(any(), any(), any(), any(), any(), any(), any()); + }).when(service).infer(any(), any(), any(), anyBoolean(), any(), any(), any(), any()); 
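// The stubbing changes in these ServiceUtilsTests follow mechanically from the new boolean stream parameter: infer(...) now takes eight arguments, so the response listener moves from getArguments()[6] to getArgument(7), and Mockito's typed getArgument(int) also removes the need for the unchecked cast and its @SuppressWarnings. A minimal sketch of the updated stubbing pattern (generics restored for readability):
//
//   doAnswer(invocation -> {
//       ActionListener<InferenceServiceResults> listener = invocation.getArgument(7);
//       listener.onResponse(results);
//       return Void.TYPE;
//   }).when(service).infer(any(), any(), any(), anyBoolean(), any(), any(), any(), any());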
PlainActionFuture listener = new PlainActionFuture<>(); getEmbeddingSize(model, service, listener); @@ -903,12 +902,11 @@ public void testGetEmbeddingSize_ReturnsSize_ForTextEmbeddingResults() { var textEmbedding = TextEmbeddingResultsTests.createRandomResults(); doAnswer(invocation -> { - @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocation.getArguments()[6]; + ActionListener listener = invocation.getArgument(7); listener.onResponse(textEmbedding); return Void.TYPE; - }).when(service).infer(any(), any(), any(), any(), any(), any(), any()); + }).when(service).infer(any(), any(), any(), anyBoolean(), any(), any(), any(), any()); PlainActionFuture listener = new PlainActionFuture<>(); getEmbeddingSize(model, service, listener); @@ -927,12 +925,11 @@ public void testGetEmbeddingSize_ReturnsSize_ForTextEmbeddingByteResults() { var textEmbedding = InferenceTextEmbeddingByteResultsTests.createRandomResults(); doAnswer(invocation -> { - @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocation.getArguments()[6]; + ActionListener listener = invocation.getArgument(7); listener.onResponse(textEmbedding); return Void.TYPE; - }).when(service).infer(any(), any(), any(), any(), any(), any(), any()); + }).when(service).infer(any(), any(), any(), anyBoolean(), any(), any(), any(), any()); PlainActionFuture listener = new PlainActionFuture<>(); getEmbeddingSize(model, service, listener); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java index bbf34354e1818..297a42f9d1fa7 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java @@ -671,6 +671,7 @@ public void testInfer_ThrowsErrorWhenModelIsNotAmazonBedrockModel() throws IOExc mockModel, null, List.of(""), + false, new HashMap<>(), InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT, @@ -721,6 +722,7 @@ public void testInfer_SendsRequest_ForEmbeddingsModel() throws IOException { model, null, List.of("abc"), + false, new HashMap<>(), InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT, @@ -762,6 +764,7 @@ public void testInfer_SendsRequest_ForChatCompletionModel() throws IOException { model, null, List.of("abc"), + false, new HashMap<>(), InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT, @@ -1025,6 +1028,7 @@ public void testInfer_UnauthorizedResponse() throws IOException { model, null, List.of("abc"), + false, new HashMap<>(), InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/anthropic/AnthropicServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/anthropic/AnthropicServiceTests.java index 5e32344ab3840..c3693c227c435 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/anthropic/AnthropicServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/anthropic/AnthropicServiceTests.java @@ -452,6 +452,7 @@ public void testInfer_ThrowsErrorWhenModelIsNotAValidModel() throws IOException mockModel, null, List.of(""), + false, 
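// The bare "false" added to each infer(...) call site in these service tests is the new stream flag in positional order (after the input list); all of these tests exercise the non-streaming path, so they pass false throughout.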
new HashMap<>(), InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT, @@ -506,6 +507,7 @@ public void testInfer_SendsCompletionRequest() throws IOException { model, null, List.of("input"), + false, new HashMap<>(), InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java index 6f33c36f42db4..bb736f592fbdb 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java @@ -825,6 +825,7 @@ public void testInfer_ThrowsErrorWhenModelIsNotAzureAiStudioModel() throws IOExc mockModel, null, List.of(""), + false, new HashMap<>(), InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT, @@ -954,6 +955,7 @@ public void testInfer_WithChatCompletionModel() throws IOException { model, null, List.of("abc"), + false, new HashMap<>(), InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT, @@ -1004,6 +1006,7 @@ public void testInfer_UnauthorisedResponse() throws IOException { model, null, List.of("abc"), + false, new HashMap<>(), InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java index b3fbd6fc9b425..142877c09180f 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java @@ -601,6 +601,7 @@ public void testInfer_ThrowsErrorWhenModelIsNotAzureOpenAiModel() throws IOExcep mockModel, null, List.of(""), + false, new HashMap<>(), InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT, @@ -656,6 +657,7 @@ public void testInfer_SendsRequest() throws IOException, URISyntaxException { model, null, List.of("abc"), + false, new HashMap<>(), InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT, @@ -1051,6 +1053,7 @@ public void testInfer_UnauthorisedResponse() throws IOException, URISyntaxExcept model, null, List.of("abc"), + false, new HashMap<>(), InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java index aebc3e3776c40..a577a6664d39d 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java @@ -622,6 +622,7 @@ public void testInfer_ThrowsErrorWhenModelIsNotCohereModel() throws IOException mockModel, null, List.of(""), + false, new HashMap<>(), InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT, @@ -689,6 +690,7 @@ public void testInfer_SendsRequest() throws IOException { model, null, List.of("abc"), + false, new HashMap<>(), 
InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT, @@ -932,6 +934,7 @@ public void testInfer_UnauthorisedResponse() throws IOException { model, null, List.of("abc"), + false, new HashMap<>(), InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT, @@ -991,6 +994,7 @@ public void testInfer_SetsInputTypeToIngest_FromInferParameter_WhenTaskSettingsA model, null, List.of("abc"), + false, new HashMap<>(), InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT, @@ -1064,6 +1068,7 @@ public void testInfer_SetsInputTypeToIngestFromInferParameter_WhenModelSettingIs model, null, List.of("abc"), + false, CohereEmbeddingsTaskSettingsTests.getTaskSettingsMap(InputType.SEARCH, null), InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT, @@ -1135,6 +1140,7 @@ public void testInfer_DoesNotSetInputType_WhenNotPresentInTaskSettings_AndUnspec model, null, List.of("abc"), + false, new HashMap<>(), InputType.UNSPECIFIED, InferenceAction.Request.DEFAULT_TIMEOUT, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java index 38124b3401aaa..0bbf2be7301d8 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java @@ -346,6 +346,7 @@ public void testInfer_ThrowsErrorWhenModelIsNotAValidModel() throws IOException mockModel, null, List.of(""), + false, new HashMap<>(), InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT, @@ -397,6 +398,7 @@ public void testInfer_SendsEmbeddingsRequest() throws IOException { model, null, List.of("input text"), + false, new HashMap<>(), InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioServiceTests.java index a8882bb244512..5d79d0e01f401 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioServiceTests.java @@ -503,6 +503,7 @@ public void testInfer_ThrowsErrorWhenModelIsNotGoogleAiStudioModel() throws IOEx mockModel, null, List.of(""), + false, new HashMap<>(), InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT, @@ -578,6 +579,7 @@ public void testInfer_SendsCompletionRequest() throws IOException { model, null, List.of("input"), + false, new HashMap<>(), InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT, @@ -634,6 +636,7 @@ public void testInfer_SendsEmbeddingsRequest() throws IOException { model, null, List.of(input), + false, new HashMap<>(), InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT, @@ -775,6 +778,7 @@ public void testInfer_ResourceNotFound() throws IOException { model, null, List.of("abc"), + false, new HashMap<>(), InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT, @@ -914,6 +918,45 @@ public void testCheckModelConfig_DoesNotUpdateSimilarity_WhenItIsSpecifiedAsCosi } } + public void testUpdateModelWithEmbeddingDetails_InvalidModelProvided() throws 
IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + try (var service = new GoogleAiStudioService(senderFactory, createWithEmptySettings(threadPool))) { + var model = GoogleAiStudioCompletionModelTests.createModel(randomAlphaOfLength(10), randomAlphaOfLength(10)); + assertThrows( + ElasticsearchStatusException.class, + () -> { service.updateModelWithEmbeddingDetails(model, randomNonNegativeInt()); } + ); + } + } + + public void testUpdateModelWithEmbeddingDetails_NullSimilarityInOriginalModel() throws IOException { + testUpdateModelWithEmbeddingDetails_Successful(null); + } + + public void testUpdateModelWithEmbeddingDetails_NonNullSimilarityInOriginalModel() throws IOException { + testUpdateModelWithEmbeddingDetails_Successful(randomFrom(SimilarityMeasure.values())); + } + + private void testUpdateModelWithEmbeddingDetails_Successful(SimilarityMeasure similarityMeasure) throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + try (var service = new GoogleAiStudioService(senderFactory, createWithEmptySettings(threadPool))) { + var embeddingSize = randomNonNegativeInt(); + var model = GoogleAiStudioEmbeddingsModelTests.createModel( + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomNonNegativeInt(), + similarityMeasure + ); + + Model updatedModel = service.updateModelWithEmbeddingDetails(model, embeddingSize); + + SimilarityMeasure expectedSimilarityMeasure = similarityMeasure == null ? SimilarityMeasure.DOT_PRODUCT : similarityMeasure; + assertEquals(expectedSimilarityMeasure, updatedModel.getServiceSettings().similarity()); + assertEquals(embeddingSize, updatedModel.getServiceSettings().dimensions().intValue()); + } + } + public static Map buildExpectationCompletions(List completions) { return Map.of( ChatCompletionResults.COMPLETION, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseServiceTests.java index 22c3b7895460a..168110ae8f7c7 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseServiceTests.java @@ -69,6 +69,7 @@ public void testInfer_ThrowsErrorWhenModelIsNotHuggingFaceModel() throws IOExcep mockModel, null, List.of(""), + false, new HashMap<>(), InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java index f68aedd69f365..d13dea2ab6b4c 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java @@ -438,6 +438,7 @@ public void testInfer_SendsEmbeddingsRequest() throws IOException { model, null, List.of("abc"), + false, new HashMap<>(), InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT, @@ -481,6 +482,7 @@ public void testInfer_SendsElserRequest() throws IOException { model, 
null, List.of("abc"), + false, new HashMap<>(), InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT, @@ -595,6 +597,45 @@ public void testCheckModelConfig_DefaultsSimilarityToCosine() throws IOException } } + public void testUpdateModelWithEmbeddingDetails_InvalidModelProvided() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + try (var service = new HuggingFaceService(senderFactory, createWithEmptySettings(threadPool))) { + var model = HuggingFaceElserModelTests.createModel(randomAlphaOfLength(10), randomAlphaOfLength(10)); + assertThrows( + ElasticsearchStatusException.class, + () -> { service.updateModelWithEmbeddingDetails(model, randomNonNegativeInt()); } + ); + } + } + + public void testUpdateModelWithEmbeddingDetails_NullSimilarityInOriginalModel() throws IOException { + testUpdateModelWithEmbeddingDetails_Successful(null); + } + + public void testUpdateModelWithEmbeddingDetails_NonNullSimilarityInOriginalModel() throws IOException { + testUpdateModelWithEmbeddingDetails_Successful(randomFrom(SimilarityMeasure.values())); + } + + private void testUpdateModelWithEmbeddingDetails_Successful(SimilarityMeasure similarityMeasure) throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + try (var service = new HuggingFaceService(senderFactory, createWithEmptySettings(threadPool))) { + var embeddingSize = randomNonNegativeInt(); + var model = HuggingFaceEmbeddingsModelTests.createModel( + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomNonNegativeInt(), + randomNonNegativeInt(), + similarityMeasure + ); + + Model updatedModel = service.updateModelWithEmbeddingDetails(model, embeddingSize); + + SimilarityMeasure expectedSimilarityMeasure = similarityMeasure == null ? 
SimilarityMeasure.COSINE : similarityMeasure; + assertEquals(expectedSimilarityMeasure, updatedModel.getServiceSettings().similarity()); + assertEquals(embeddingSize, updatedModel.getServiceSettings().dimensions().intValue()); + } + } + public void testChunkedInfer_CallsInfer_TextEmbedding_ConvertsFloatResponse() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java index e0936c778c7a7..a2de7c15d54da 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java @@ -409,6 +409,7 @@ public void testInfer_ThrowsErrorWhenModelIsNotIbmWatsonxModel() throws IOExcept mockModel, null, List.of(""), + false, new HashMap<>(), InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT, @@ -465,6 +466,7 @@ public void testInfer_SendsEmbeddingsRequest() throws IOException { model, null, List.of(input), + false, new HashMap<>(), InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT, @@ -588,6 +590,7 @@ public void testInfer_ResourceNotFound() throws IOException { model, null, List.of("abc"), + false, new HashMap<>(), InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/mistral/MistralServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/mistral/MistralServiceTests.java index c833f00c4c433..33a2b43caf174 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/mistral/MistralServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/mistral/MistralServiceTests.java @@ -30,6 +30,7 @@ import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; +import org.elasticsearch.xpack.inference.ModelConfigurationsTests; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests; @@ -38,6 +39,7 @@ import org.elasticsearch.xpack.inference.services.mistral.embeddings.MistralEmbeddingModelTests; import org.elasticsearch.xpack.inference.services.mistral.embeddings.MistralEmbeddingsModel; import org.elasticsearch.xpack.inference.services.mistral.embeddings.MistralEmbeddingsServiceSettings; +import org.elasticsearch.xpack.inference.services.settings.RateLimitSettingsTests; import org.hamcrest.CoreMatchers; import org.hamcrest.MatcherAssert; import org.hamcrest.Matchers; @@ -388,6 +390,48 @@ public void testCheckModelConfig_ForEmbeddingsModel_Works() throws IOException { } } + public void testUpdateModelWithEmbeddingDetails_InvalidModelProvided() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + try (var service = new MistralService(senderFactory, createWithEmptySettings(threadPool))) { + var model = 
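// Feeding a plain Model (wrapping random ModelConfigurations) rather than a MistralEmbeddingsModel drives updateModelWithEmbeddingDetails into its else branch, which is exactly the invalidModelTypeForUpdateModelWithEmbeddingDetails path under test here.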
new Model(ModelConfigurationsTests.createRandomInstance()); + + assertThrows( + ElasticsearchStatusException.class, + () -> { service.updateModelWithEmbeddingDetails(model, randomNonNegativeInt()); } + ); + } + } + + public void testUpdateModelWithEmbeddingDetails_NullSimilarityInOriginalModel() throws IOException { + testUpdateModelWithEmbeddingDetails_Successful(null); + } + + public void testUpdateModelWithEmbeddingDetails_NonNullSimilarityInOriginalModel() throws IOException { + testUpdateModelWithEmbeddingDetails_Successful(randomFrom(SimilarityMeasure.values())); + } + + private void testUpdateModelWithEmbeddingDetails_Successful(SimilarityMeasure similarityMeasure) throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + try (var service = new MistralService(senderFactory, createWithEmptySettings(threadPool))) { + var embeddingSize = randomNonNegativeInt(); + var model = MistralEmbeddingModelTests.createModel( + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomNonNegativeInt(), + randomNonNegativeInt(), + similarityMeasure, + RateLimitSettingsTests.createRandom() + ); + + Model updatedModel = service.updateModelWithEmbeddingDetails(model, embeddingSize); + + SimilarityMeasure expectedSimilarityMeasure = similarityMeasure == null ? SimilarityMeasure.DOT_PRODUCT : similarityMeasure; + assertEquals(expectedSimilarityMeasure, updatedModel.getServiceSettings().similarity()); + assertEquals(embeddingSize, updatedModel.getServiceSettings().dimensions().intValue()); + } + } + public void testInfer_ThrowsErrorWhenModelIsNotMistralEmbeddingsModel() throws IOException { var sender = mock(Sender.class); @@ -402,6 +446,7 @@ public void testInfer_ThrowsErrorWhenModelIsNotMistralEmbeddingsModel() throws I mockModel, null, List.of(""), + false, new HashMap<>(), InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT, @@ -527,6 +572,7 @@ public void testInfer_UnauthorisedResponse() throws IOException { model, null, List.of("abc"), + false, new HashMap<>(), InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java index e4a304f818328..32099c4bd0be9 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java @@ -936,6 +936,7 @@ public void testInfer_ThrowsErrorWhenModelIsNotOpenAiModel() throws IOException mockModel, null, List.of(""), + false, new HashMap<>(), InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT, @@ -990,6 +991,7 @@ public void testInfer_SendsRequest() throws IOException { model, null, List.of("abc"), + false, new HashMap<>(), InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT, @@ -1433,7 +1435,7 @@ private void testUpdateModelWithEmbeddingDetails_Successful(SimilarityMeasure si randomAlphaOfLength(10), randomAlphaOfLength(10), randomAlphaOfLength(10), - null, + similarityMeasure, randomNonNegativeInt(), randomNonNegativeInt(), randomBoolean() @@ -1441,7 +1443,8 @@ private void testUpdateModelWithEmbeddingDetails_Successful(SimilarityMeasure si Model updatedModel = service.updateModelWithEmbeddingDetails(model, embeddingSize); - 
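// The OpenAI test fix above matters: previously the model was always built with a null similarity, so only the DOT_PRODUCT fallback branch was exercised; passing the randomized similarityMeasure through lets the test also cover preservation of an explicitly configured similarity.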
assertEquals(SimilarityMeasure.DOT_PRODUCT, updatedModel.getServiceSettings().similarity()); + SimilarityMeasure expectedSimilarityMeasure = similarityMeasure == null ? SimilarityMeasure.DOT_PRODUCT : similarityMeasure; + assertEquals(expectedSimilarityMeasure, updatedModel.getServiceSettings().similarity()); assertEquals(embeddingSize, updatedModel.getServiceSettings().dimensions().intValue()); } } @@ -1469,6 +1472,7 @@ public void testInfer_UnauthorisedResponse() throws IOException { model, null, List.of("abc"), + false, new HashMap<>(), InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/validation/SimpleServiceIntegrationValidatorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/validation/SimpleServiceIntegrationValidatorTests.java index 23000ce431e7b..767dd4d64a7d3 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/validation/SimpleServiceIntegrationValidatorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/validation/SimpleServiceIntegrationValidatorTests.java @@ -64,6 +64,7 @@ public void testValidate_ServiceThrowsException() { eq(mockModel), eq(null), eq(TEST_INPUT), + eq(false), eq(Map.of()), eq(InputType.INGEST), eq(InferenceAction.Request.DEFAULT_TIMEOUT), @@ -94,7 +95,7 @@ public void testValidate_SuccessfulCallToServiceForReRankTaskType() { private void mockSuccessfulCallToService(String query, InferenceServiceResults result) { doAnswer(ans -> { - ActionListener responseListener = ans.getArgument(6); + ActionListener responseListener = ans.getArgument(7); responseListener.onResponse(result); return null; }).when(mockInferenceService) @@ -102,6 +103,7 @@ private void mockSuccessfulCallToService(String query, InferenceServiceResults r eq(mockModel), eq(query), eq(TEST_INPUT), + eq(false), eq(Map.of()), eq(InputType.INGEST), eq(InferenceAction.Request.DEFAULT_TIMEOUT), @@ -117,12 +119,12 @@ private void verifyCallToService(boolean withQuery) { eq(mockModel), eq(withQuery ? 
TEST_QUERY : null), eq(TEST_INPUT), + eq(false), eq(Map.of()), eq(InputType.INGEST), eq(InferenceAction.Request.DEFAULT_TIMEOUT), any() ); - verify(mockActionListener).delegateFailureAndWrap(any()); verifyNoMoreInteractions(mockInferenceService, mockModel, mockActionListener, mockInferenceServiceResults); } } diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml index 2070b3752791a..4d90d8faeb3f3 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml @@ -122,6 +122,147 @@ setup: - close_to: { hits.hits.0._score: { value: 3.7837332e17, error: 1e10 } } - length: { hits.hits.0._source.inference_field.inference.chunks: 2 } +--- +"Query using a sparse embedding model and inner hits": + - requires: + cluster_features: "semantic_text.inner_hits" + reason: semantic_text inner hits support added in 8.16.0 + + - skip: + features: [ "headers", "close_to" ] + + - do: + index: + index: test-sparse-index + id: doc_1 + body: + inference_field: ["inference test", "another inference test", "yet another inference test"] + non_inference_field: "non inference test" + refresh: true + + - do: + headers: + # Force JSON content type so that we use a parser that interprets the floating-point score as a double + Content-Type: application/json + search: + index: test-sparse-index + body: + query: + semantic: + field: "inference_field" + query: "inference test" + inner_hits: {} + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + - close_to: { hits.hits.0._score: { value: 3.7837332e17, error: 1e10 } } + - length: { hits.hits.0._source.inference_field.inference.chunks: 3 } + - match: { hits.hits.0.inner_hits.inference_field.hits.total.value: 3 } + - length: { hits.hits.0.inner_hits.inference_field.hits.hits: 3 } + - match: { hits.hits.0.inner_hits.inference_field.hits.hits.0._source.text: "another inference test" } + - not_exists: hits.hits.0.inner_hits.inference_field.hits.hits.0._source.embeddings + - match: { hits.hits.0.inner_hits.inference_field.hits.hits.1._source.text: "yet another inference test" } + - not_exists: hits.hits.0.inner_hits.inference_field.hits.hits.1._source.embeddings + - match: { hits.hits.0.inner_hits.inference_field.hits.hits.2._source.text: "inference test" } + - not_exists: hits.hits.0.inner_hits.inference_field.hits.hits.2._source.embeddings + + - do: + headers: + # Force JSON content type so that we use a parser that interprets the floating-point score as a double + Content-Type: application/json + search: + index: test-sparse-index + body: + query: + semantic: + field: "inference_field" + query: "inference test" + inner_hits: { + "size": 1 + } + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + - close_to: { hits.hits.0._score: { value: 3.7837332e17, error: 1e10 } } + - length: { hits.hits.0._source.inference_field.inference.chunks: 3 } + - match: { hits.hits.0.inner_hits.inference_field.hits.total.value: 3 } + - length: { hits.hits.0.inner_hits.inference_field.hits.hits: 1 } + - match: { hits.hits.0.inner_hits.inference_field.hits.hits.0._source.text: "another inference test" } + - not_exists: hits.hits.0.inner_hits.inference_field.hits.hits.0._source.embeddings + + - do: + headers: + # 
Force JSON content type so that we use a parser that interprets the floating-point score as a double + Content-Type: application/json + search: + index: test-sparse-index + body: + query: + semantic: + field: "inference_field" + query: "inference test" + inner_hits: { + "from": 1 + } + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + - close_to: { hits.hits.0._score: { value: 3.7837332e17, error: 1e10 } } + - length: { hits.hits.0._source.inference_field.inference.chunks: 3 } + - match: { hits.hits.0.inner_hits.inference_field.hits.total.value: 3 } + - length: { hits.hits.0.inner_hits.inference_field.hits.hits: 2 } + - match: { hits.hits.0.inner_hits.inference_field.hits.hits.0._source.text: "yet another inference test" } + - not_exists: hits.hits.0.inner_hits.inference_field.hits.hits.0._source.embeddings + - match: { hits.hits.0.inner_hits.inference_field.hits.hits.1._source.text: "inference test" } + - not_exists: hits.hits.0.inner_hits.inference_field.hits.hits.1._source.embeddings + + - do: + headers: + # Force JSON content type so that we use a parser that interprets the floating-point score as a double + Content-Type: application/json + search: + index: test-sparse-index + body: + query: + semantic: + field: "inference_field" + query: "inference test" + inner_hits: { + "from": 1, + "size": 1 + } + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + - close_to: { hits.hits.0._score: { value: 3.7837332e17, error: 1e10 } } + - length: { hits.hits.0._source.inference_field.inference.chunks: 3 } + - match: { hits.hits.0.inner_hits.inference_field.hits.total.value: 3 } + - length: { hits.hits.0.inner_hits.inference_field.hits.hits: 1 } + - match: { hits.hits.0.inner_hits.inference_field.hits.hits.0._source.text: "yet another inference test" } + - not_exists: hits.hits.0.inner_hits.inference_field.hits.hits.0._source.embeddings + + - do: + headers: + # Force JSON content type so that we use a parser that interprets the floating-point score as a double + Content-Type: application/json + search: + index: test-sparse-index + body: + query: + semantic: + field: "inference_field" + query: "inference test" + inner_hits: { + "from": 3 + } + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + - close_to: { hits.hits.0._score: { value: 3.7837332e17, error: 1e10 } } + - length: { hits.hits.0._source.inference_field.inference.chunks: 3 } + - match: { hits.hits.0.inner_hits.inference_field.hits.total.value: 0 } # Hits total drops to zero when you page off the end + - length: { hits.hits.0.inner_hits.inference_field.hits.hits: 0 } + --- "Numeric query using a sparse embedding model": - skip: @@ -250,6 +391,147 @@ setup: - close_to: { hits.hits.0._score: { value: 1.0, error: 0.0001 } } - length: { hits.hits.0._source.inference_field.inference.chunks: 2 } +--- +"Query using a dense embedding model and inner hits": + - requires: + cluster_features: "semantic_text.inner_hits" + reason: semantic_text inner hits support added in 8.16.0 + + - skip: + features: [ "headers", "close_to" ] + + - do: + index: + index: test-dense-index + id: doc_1 + body: + inference_field: ["inference test", "another inference test", "yet another inference test"] + non_inference_field: "non inference test" + refresh: true + + - do: + headers: + # Force JSON content type so that we use a parser that interprets the floating-point score as a double + Content-Type: application/json + search: + index: test-dense-index + body: + query: + semantic: + field: 
"inference_field" + query: "inference test" + inner_hits: {} + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + - close_to: { hits.hits.0._score: { value: 1.0, error: 0.0001 } } + - length: { hits.hits.0._source.inference_field.inference.chunks: 3 } + - match: { hits.hits.0.inner_hits.inference_field.hits.total.value: 3 } + - length: { hits.hits.0.inner_hits.inference_field.hits.hits: 3 } + - match: { hits.hits.0.inner_hits.inference_field.hits.hits.0._source.text: "inference test" } + - not_exists: hits.hits.0.inner_hits.inference_field.hits.hits.0._source.embeddings + - match: { hits.hits.0.inner_hits.inference_field.hits.hits.1._source.text: "yet another inference test" } + - not_exists: hits.hits.0.inner_hits.inference_field.hits.hits.1._source.embeddings + - match: { hits.hits.0.inner_hits.inference_field.hits.hits.2._source.text: "another inference test" } + - not_exists: hits.hits.0.inner_hits.inference_field.hits.hits.2._source.embeddings + + - do: + headers: + # Force JSON content type so that we use a parser that interprets the floating-point score as a double + Content-Type: application/json + search: + index: test-dense-index + body: + query: + semantic: + field: "inference_field" + query: "inference test" + inner_hits: { + "size": 1 + } + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + - close_to: { hits.hits.0._score: { value: 1.0, error: 0.0001 } } + - length: { hits.hits.0._source.inference_field.inference.chunks: 3 } + - match: { hits.hits.0.inner_hits.inference_field.hits.total.value: 3 } + - length: { hits.hits.0.inner_hits.inference_field.hits.hits: 1 } + - match: { hits.hits.0.inner_hits.inference_field.hits.hits.0._source.text: "inference test" } + - not_exists: hits.hits.0.inner_hits.inference_field.hits.hits.0._source.embeddings + + - do: + headers: + # Force JSON content type so that we use a parser that interprets the floating-point score as a double + Content-Type: application/json + search: + index: test-dense-index + body: + query: + semantic: + field: "inference_field" + query: "inference test" + inner_hits: { + "from": 1 + } + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + - close_to: { hits.hits.0._score: { value: 1.0, error: 0.0001 } } + - length: { hits.hits.0._source.inference_field.inference.chunks: 3 } + - match: { hits.hits.0.inner_hits.inference_field.hits.total.value: 3 } + - length: { hits.hits.0.inner_hits.inference_field.hits.hits: 2 } + - match: { hits.hits.0.inner_hits.inference_field.hits.hits.0._source.text: "yet another inference test" } + - not_exists: hits.hits.0.inner_hits.inference_field.hits.hits.0._source.embeddings + - match: { hits.hits.0.inner_hits.inference_field.hits.hits.1._source.text: "another inference test" } + - not_exists: hits.hits.0.inner_hits.inference_field.hits.hits.1._source.embeddings + + - do: + headers: + # Force JSON content type so that we use a parser that interprets the floating-point score as a double + Content-Type: application/json + search: + index: test-dense-index + body: + query: + semantic: + field: "inference_field" + query: "inference test" + inner_hits: { + "from": 1, + "size": 1 + } + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + - close_to: { hits.hits.0._score: { value: 1.0, error: 0.0001 } } + - length: { hits.hits.0._source.inference_field.inference.chunks: 3 } + - match: { hits.hits.0.inner_hits.inference_field.hits.total.value: 3 } + - length: { 
hits.hits.0.inner_hits.inference_field.hits.hits: 1 } + - match: { hits.hits.0.inner_hits.inference_field.hits.hits.0._source.text: "yet another inference test" } + - not_exists: hits.hits.0.inner_hits.inference_field.hits.hits.0._source.embeddings + + - do: + headers: + # Force JSON content type so that we use a parser that interprets the floating-point score as a double + Content-Type: application/json + search: + index: test-dense-index + body: + query: + semantic: + field: "inference_field" + query: "inference test" + inner_hits: { + "from": 3 + } + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + - close_to: { hits.hits.0._score: { value: 1.0, error: 0.0001 } } + - length: { hits.hits.0._source.inference_field.inference.chunks: 3 } + - match: { hits.hits.0.inner_hits.inference_field.hits.total.value: 0 } # Hits total drops to zero when you page off the end + - length: { hits.hits.0.inner_hits.inference_field.hits.hits: 0 } + --- "Numeric query using a dense embedding model": - skip: @@ -478,6 +760,101 @@ setup: - close_to: { hits.hits.0._score: { value: 3.7837332e17, error: 1e10 } } - length: { hits.hits.0._source.inference_field.inference.chunks: 2 } +--- +"Query multiple semantic text fields with inner hits": + - requires: + cluster_features: "semantic_text.inner_hits" + reason: semantic_text inner hits support added in 8.16.0 + + - do: + indices.create: + index: test-multi-semantic-text-field-index + body: + mappings: + properties: + inference_field_1: + type: semantic_text + inference_id: sparse-inference-id + inference_field_2: + type: semantic_text + inference_id: sparse-inference-id + + - do: + index: + index: test-multi-semantic-text-field-index + id: doc_1 + body: + inference_field_1: [ "inference test 1", "another inference test 1" ] + inference_field_2: [ "inference test 2", "another inference test 2", "yet another inference test 2" ] + refresh: true + + - do: + search: + index: test-multi-semantic-text-field-index + body: + query: + bool: + must: + - semantic: + field: "inference_field_1" + query: "inference test" + inner_hits: { } + - semantic: + field: "inference_field_2" + query: "inference test" + inner_hits: { } + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + - length: { hits.hits.0._source.inference_field_1.inference.chunks: 2 } + - length: { hits.hits.0._source.inference_field_2.inference.chunks: 3 } + - match: { hits.hits.0.inner_hits.inference_field_1.hits.total.value: 2 } + - length: { hits.hits.0.inner_hits.inference_field_1.hits.hits: 2 } + - match: { hits.hits.0.inner_hits.inference_field_2.hits.total.value: 3 } + - length: { hits.hits.0.inner_hits.inference_field_2.hits.hits: 3 } + +--- +"Query semantic text field in object with inner hits": + - requires: + cluster_features: "semantic_text.inner_hits" + reason: semantic_text inner hits support added in 8.16.0 + + - do: + indices.create: + index: test-semantic-text-in-object-index + body: + mappings: + properties: + container: + properties: + inference_field: + type: semantic_text + inference_id: sparse-inference-id + + - do: + index: + index: test-semantic-text-in-object-index + id: doc_1 + body: + container.inference_field: ["inference test", "another inference test", "yet another inference test"] + refresh: true + + - do: + search: + index: test-semantic-text-in-object-index + body: + query: + semantic: + field: "container.inference_field" + query: "inference test" + inner_hits: {} + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: 
"doc_1" } + - exists: hits.hits.0.inner_hits.container\.inference_field + - match: { hits.hits.0.inner_hits.container\.inference_field.hits.total.value: 3 } + - length: { hits.hits.0.inner_hits.container\.inference_field.hits.hits: 3 } + --- "Query the wrong field type": - do: @@ -839,3 +1216,41 @@ setup: - match: { error.type: "resource_not_found_exception" } - match: { error.reason: "Inference endpoint not found [invalid-inference-id]" } + +--- +"Query using inner hits with invalid args": + - requires: + cluster_features: "semantic_text.inner_hits" + reason: semantic_text inner hits support added in 8.16.0 + + - do: + catch: bad_request + search: + index: test-sparse-index + body: + query: + semantic: + field: "inference_field" + query: "inference test" + inner_hits: { + "from": -1 + } + + - match: { error.root_cause.0.type: "illegal_argument_exception" } + - match: { error.root_cause.0.reason: "illegal from value, at least 0 or higher" } + + - do: + catch: bad_request + search: + index: test-sparse-index + body: + query: + semantic: + field: "inference_field" + query: "inference test" + inner_hits: { + "size": -1 + } + + - match: { error.root_cause.0.type: "illegal_argument_exception" } + - match: { error.root_cause.0.reason: "illegal size value, at least 0 or higher" } diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java index e38f953be96a3..5cb7bf9e75252 100644 --- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java @@ -17,6 +17,7 @@ import java.util.Collection; import java.util.List; +import static org.elasticsearch.xpack.cluster.settings.ClusterSettings.CLUSTER_LOGSDB_ENABLED; import static org.elasticsearch.xpack.logsdb.SyntheticSourceLicenseService.FALLBACK_SETTING; public class LogsDBPlugin extends Plugin { @@ -24,9 +25,12 @@ public class LogsDBPlugin extends Plugin { private final Settings settings; private final SyntheticSourceLicenseService licenseService; + private final LogsdbIndexModeSettingsProvider logsdbIndexModeSettingsProvider; + public LogsDBPlugin(Settings settings) { this.settings = settings; this.licenseService = new SyntheticSourceLicenseService(settings); + this.logsdbIndexModeSettingsProvider = new LogsdbIndexModeSettingsProvider(settings); } @Override @@ -34,6 +38,10 @@ public Collection createComponents(PluginServices services) { licenseService.setLicenseState(XPackPlugin.getSharedLicenseState()); var clusterSettings = services.clusterService().getClusterSettings(); clusterSettings.addSettingsUpdateConsumer(FALLBACK_SETTING, licenseService::setSyntheticSourceFallback); + clusterSettings.addSettingsUpdateConsumer( + CLUSTER_LOGSDB_ENABLED, + logsdbIndexModeSettingsProvider::updateClusterIndexModeLogsdbEnabled + ); // Nothing to share here: return super.createComponents(services); } @@ -41,13 +49,13 @@ public Collection createComponents(PluginServices services) { @Override public Collection getAdditionalIndexSettingProviders(IndexSettingProvider.Parameters parameters) { if (DiscoveryNode.isStateless(settings)) { - return List.of(); + return List.of(logsdbIndexModeSettingsProvider); } - return List.of(new SyntheticSourceIndexSettingsProvider(licenseService)); + return List.of(new SyntheticSourceIndexSettingsProvider(licenseService), logsdbIndexModeSettingsProvider); } @Override public List> getSettings() { - 
return List.of(FALLBACK_SETTING); + return List.of(FALLBACK_SETTING, CLUSTER_LOGSDB_ENABLED); } } diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProvider.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProvider.java new file mode 100644 index 0000000000000..3f6bb66dfa438 --- /dev/null +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProvider.java @@ -0,0 +1,89 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.logsdb; + +import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.metadata.MetadataIndexTemplateService; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexMode; +import org.elasticsearch.index.IndexSettingProvider; +import org.elasticsearch.index.IndexSettings; + +import java.time.Instant; +import java.util.List; +import java.util.Locale; + +import static org.elasticsearch.xpack.cluster.settings.ClusterSettings.CLUSTER_LOGSDB_ENABLED; + +final class LogsdbIndexModeSettingsProvider implements IndexSettingProvider { + private static final String LOGS_PATTERN = "logs-*-*"; + private volatile boolean isLogsdbEnabled; + + LogsdbIndexModeSettingsProvider(final Settings settings) { + this.isLogsdbEnabled = CLUSTER_LOGSDB_ENABLED.get(settings); + } + + void updateClusterIndexModeLogsdbEnabled(boolean isLogsdbEnabled) { + this.isLogsdbEnabled = isLogsdbEnabled; + } + + @Override + public Settings getAdditionalIndexSettings( + final String indexName, + final String dataStreamName, + boolean isTimeSeries, + final Metadata metadata, + final Instant resolvedAt, + final Settings settings, + final List combinedTemplateMappings + ) { + if (isLogsdbEnabled == false || dataStreamName == null) { + return Settings.EMPTY; + } + + final IndexMode indexMode = resolveIndexMode(settings.get(IndexSettings.MODE.getKey())); + if (indexMode != null) { + return Settings.EMPTY; + } + + if (usesLogsAtSettingsComponentTemplate(metadata, dataStreamName) && matchesLogsPattern(dataStreamName)) { + return Settings.builder().put("index.mode", IndexMode.LOGSDB.getName()).build(); + } + + return Settings.EMPTY; + } + + private static boolean matchesLogsPattern(final String name) { + return Regex.simpleMatch(LOGS_PATTERN, name); + } + + private IndexMode resolveIndexMode(final String mode) { + return mode != null ? 
Enum.valueOf(IndexMode.class, mode.toUpperCase(Locale.ROOT)) : null; + } + + private boolean usesLogsAtSettingsComponentTemplate(final Metadata metadata, final String name) { + final String template = MetadataIndexTemplateService.findV2Template(metadata, name, false); + if (template == null) { + return false; + } + final ComposableIndexTemplate composableIndexTemplate = metadata.templatesV2().get(template); + if (composableIndexTemplate == null) { + return false; + } + for (final String componentTemplate : composableIndexTemplate.composedOf()) { + if ("logs@settings".equals(componentTemplate)) { + return true; + } + } + return false; + } + +} diff --git a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProviderTests.java b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProviderTests.java new file mode 100644 index 0000000000000..eeb5389644c02 --- /dev/null +++ b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProviderTests.java @@ -0,0 +1,326 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.logsdb; + +import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; +import org.elasticsearch.cluster.metadata.ComposableIndexTemplateMetadata; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.metadata.Template; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexMode; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.List; +import java.util.Map; + +public class LogsdbIndexModeSettingsProviderTests extends ESTestCase { + + public static final String DEFAULT_MAPPING = """ + { + "_doc": { + "properties": { + "@timestamp": { + "type": "date" + }, + "message": { + "type": "keyword" + }, + "host.name": { + "type": "keyword" + } + } + } + } + """; + + public void testLogsDbDisabled() throws IOException { + final LogsdbIndexModeSettingsProvider provider = new LogsdbIndexModeSettingsProvider( + Settings.builder().put("cluster.logsdb.enabled", false).build() + ); + + final Settings additionalIndexSettings = provider.getAdditionalIndexSettings( + null, + "logs-apache-production", + false, + Metadata.EMPTY_METADATA, + Instant.now().truncatedTo(ChronoUnit.SECONDS), + Settings.EMPTY, + List.of(new CompressedXContent(DEFAULT_MAPPING)) + ); + + assertTrue(additionalIndexSettings.isEmpty()); + } + + public void testOnIndexCreation() throws IOException { + final LogsdbIndexModeSettingsProvider provider = new LogsdbIndexModeSettingsProvider( + Settings.builder().put("cluster.logsdb.enabled", true).build() + ); + + final Settings additionalIndexSettings = provider.getAdditionalIndexSettings( + "logs-apache-production", + null, + false, + Metadata.EMPTY_METADATA, + Instant.now().truncatedTo(ChronoUnit.SECONDS), + Settings.EMPTY, + List.of(new CompressedXContent(DEFAULT_MAPPING)) + ); + + assertTrue(additionalIndexSettings.isEmpty()); + } + + public void testOnExplicitStandardIndex() throws IOException { + final LogsdbIndexModeSettingsProvider provider = new 
LogsdbIndexModeSettingsProvider( + Settings.builder().put("cluster.logsdb.enabled", true).build() + ); + + final Settings additionalIndexSettings = provider.getAdditionalIndexSettings( + null, + "logs-apache-production", + false, + Metadata.EMPTY_METADATA, + Instant.now().truncatedTo(ChronoUnit.SECONDS), + Settings.builder().put(IndexSettings.MODE.getKey(), IndexMode.STANDARD.getName()).build(), + List.of(new CompressedXContent(DEFAULT_MAPPING)) + ); + + assertTrue(additionalIndexSettings.isEmpty()); + } + + public void testOnExplicitTimeSeriesIndex() throws IOException { + final LogsdbIndexModeSettingsProvider provider = new LogsdbIndexModeSettingsProvider( + Settings.builder().put("cluster.logsdb.enabled", true).build() + ); + + final Settings additionalIndexSettings = provider.getAdditionalIndexSettings( + null, + "logs-apache-production", + false, + Metadata.EMPTY_METADATA, + Instant.now().truncatedTo(ChronoUnit.SECONDS), + Settings.builder().put(IndexSettings.MODE.getKey(), IndexMode.TIME_SERIES.getName()).build(), + List.of(new CompressedXContent(DEFAULT_MAPPING)) + ); + + assertTrue(additionalIndexSettings.isEmpty()); + } + + public void testNonLogsDataStream() throws IOException { + final LogsdbIndexModeSettingsProvider provider = new LogsdbIndexModeSettingsProvider( + Settings.builder().put("cluster.logsdb.enabled", true).build() + ); + + final Settings additionalIndexSettings = provider.getAdditionalIndexSettings( + null, + "logs", + false, + Metadata.EMPTY_METADATA, + Instant.now().truncatedTo(ChronoUnit.SECONDS), + Settings.EMPTY, + List.of(new CompressedXContent(DEFAULT_MAPPING)) + ); + + assertTrue(additionalIndexSettings.isEmpty()); + } + + public void testWithoutLogsComponentTemplate() throws IOException { + final LogsdbIndexModeSettingsProvider provider = new LogsdbIndexModeSettingsProvider( + Settings.builder().put("cluster.logsdb.enabled", true).build() + ); + + final Settings additionalIndexSettings = provider.getAdditionalIndexSettings( + null, + "logs-apache-production", + false, + buildMetadata(List.of("*"), List.of()), + Instant.now().truncatedTo(ChronoUnit.SECONDS), + Settings.EMPTY, + List.of(new CompressedXContent(DEFAULT_MAPPING)) + ); + + assertTrue(additionalIndexSettings.isEmpty()); + } + + public void testWithLogsComponentTemplate() throws IOException { + final LogsdbIndexModeSettingsProvider provider = new LogsdbIndexModeSettingsProvider( + Settings.builder().put("cluster.logsdb.enabled", true).build() + ); + + final Settings additionalIndexSettings = provider.getAdditionalIndexSettings( + null, + "logs-apache-production", + false, + buildMetadata(List.of("*"), List.of("logs@settings")), + Instant.now().truncatedTo(ChronoUnit.SECONDS), + Settings.EMPTY, + List.of(new CompressedXContent(DEFAULT_MAPPING)) + ); + + assertIndexMode(additionalIndexSettings, IndexMode.LOGSDB.getName()); + } + + public void testWithMultipleComponentTemplates() throws IOException { + final LogsdbIndexModeSettingsProvider provider = new LogsdbIndexModeSettingsProvider( + Settings.builder().put("cluster.logsdb.enabled", true).build() + ); + + final Settings additionalIndexSettings = provider.getAdditionalIndexSettings( + null, + "logs-apache-production", + false, + buildMetadata(List.of("*"), List.of("logs@settings", "logs@custom")), + Instant.now().truncatedTo(ChronoUnit.SECONDS), + Settings.EMPTY, + List.of(new CompressedXContent(DEFAULT_MAPPING)) + ); + + assertIndexMode(additionalIndexSettings, IndexMode.LOGSDB.getName()); + } + + public void 
testWithCustomComponentTemplatesOnly() throws IOException { + final LogsdbIndexModeSettingsProvider provider = new LogsdbIndexModeSettingsProvider( + Settings.builder().put("cluster.logsdb.enabled", true).build() + ); + + final Settings additionalIndexSettings = provider.getAdditionalIndexSettings( + null, + "logs-apache-production", + false, + buildMetadata(List.of("*"), List.of("logs@custom", "custom-component-template")), + Instant.now().truncatedTo(ChronoUnit.SECONDS), + Settings.EMPTY, + List.of(new CompressedXContent(DEFAULT_MAPPING)) + ); + + assertTrue(additionalIndexSettings.isEmpty()); + } + + public void testNonMatchingTemplateIndexPattern() throws IOException { + final LogsdbIndexModeSettingsProvider provider = new LogsdbIndexModeSettingsProvider( + Settings.builder().put("cluster.logsdb.enabled", true).build() + ); + + final Settings additionalIndexSettings = provider.getAdditionalIndexSettings( + null, + "logs-apache-production", + false, + buildMetadata(List.of("standard-apache-production"), List.of("logs@settings")), + Instant.now().truncatedTo(ChronoUnit.SECONDS), + Settings.EMPTY, + List.of(new CompressedXContent(DEFAULT_MAPPING)) + ); + + assertTrue(additionalIndexSettings.isEmpty()); + } + + public void testCaseSensitivity() throws IOException { + final LogsdbIndexModeSettingsProvider provider = new LogsdbIndexModeSettingsProvider( + Settings.builder().put("cluster.logsdb.enabled", true).build() + ); + + final Settings additionalIndexSettings = provider.getAdditionalIndexSettings( + null, + "LOGS-apache-production", + false, + Metadata.EMPTY_METADATA, + Instant.now().truncatedTo(ChronoUnit.SECONDS), + Settings.EMPTY, + List.of(new CompressedXContent(DEFAULT_MAPPING)) + ); + + assertTrue(additionalIndexSettings.isEmpty()); + } + + public void testMultipleHyphensInDataStreamName() throws IOException { + final LogsdbIndexModeSettingsProvider provider = new LogsdbIndexModeSettingsProvider( + Settings.builder().put("cluster.logsdb.enabled", true).build() + ); + + final Settings additionalIndexSettings = provider.getAdditionalIndexSettings( + null, + "logs-apache-production-eu", + false, + Metadata.EMPTY_METADATA, + Instant.now().truncatedTo(ChronoUnit.SECONDS), + Settings.EMPTY, + List.of(new CompressedXContent(DEFAULT_MAPPING)) + ); + + assertTrue(additionalIndexSettings.isEmpty()); + } + + public void testBeforeAndAfterSettingUpdate() throws IOException { + final LogsdbIndexModeSettingsProvider provider = new LogsdbIndexModeSettingsProvider( + Settings.builder().put("cluster.logsdb.enabled", false).build() + ); + + final Settings beforeSettings = provider.getAdditionalIndexSettings( + null, + "logs-apache-production", + false, + buildMetadata(List.of("*"), List.of("logs@settings")), + Instant.now().truncatedTo(ChronoUnit.SECONDS), + Settings.EMPTY, + List.of(new CompressedXContent(DEFAULT_MAPPING)) + ); + + assertTrue(beforeSettings.isEmpty()); + + provider.updateClusterIndexModeLogsdbEnabled(true); + + final Settings afterSettings = provider.getAdditionalIndexSettings( + null, + "logs-apache-production", + false, + buildMetadata(List.of("*"), List.of("logs@settings")), + Instant.now().truncatedTo(ChronoUnit.SECONDS), + Settings.EMPTY, + List.of(new CompressedXContent(DEFAULT_MAPPING)) + ); + + assertIndexMode(afterSettings, IndexMode.LOGSDB.getName()); + + provider.updateClusterIndexModeLogsdbEnabled(false); + + final Settings laterSettings = provider.getAdditionalIndexSettings( + null, + "logs-apache-production", + false, + buildMetadata(List.of("*"),
List.of("logs@settings")), + Instant.now().truncatedTo(ChronoUnit.SECONDS), + Settings.EMPTY, + List.of(new CompressedXContent(DEFAULT_MAPPING)) + ); + + assertTrue(laterSettings.isEmpty()); + } + + private static Metadata buildMetadata(final List indexPatterns, final List componentTemplates) throws IOException { + final Template template = new Template(Settings.EMPTY, new CompressedXContent(DEFAULT_MAPPING), null); + final ComposableIndexTemplate composableTemplate = ComposableIndexTemplate.builder() + .indexPatterns(indexPatterns) + .template(template) + .componentTemplates(componentTemplates) + .priority(1_000L) + .version(1L) + .build(); + return Metadata.builder() + .putCustom(ComposableIndexTemplateMetadata.TYPE, new ComposableIndexTemplateMetadata(Map.of("composable", composableTemplate))) + .build(); + } + + private void assertIndexMode(final Settings settings, final String expectedIndexMode) { + assertEquals(expectedIndexMode, settings.get(IndexSettings.MODE.getKey())); + } + +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index 1bc867a849090..f8a590a23a2c1 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -327,6 +327,7 @@ import org.elasticsearch.xpack.ml.dataframe.process.results.AnalyticsResult; import org.elasticsearch.xpack.ml.dataframe.process.results.MemoryUsageEstimationResult; import org.elasticsearch.xpack.ml.inference.TrainedModelStatsService; +import org.elasticsearch.xpack.ml.inference.adaptiveallocations.AdaptiveAllocationsScalerService; import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentClusterService; import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentService; import org.elasticsearch.xpack.ml.inference.deployment.DeploymentManager; @@ -1285,13 +1286,21 @@ public Collection createComponents(PluginServices services) { new MlAutoscalingDeciderService(memoryTracker, settings, nodeAvailabilityZoneMapper, clusterService) ); - MlInitializationService mlInitializationService = new MlInitializationService( - settings, + AdaptiveAllocationsScalerService adaptiveAllocationsScalerService = new AdaptiveAllocationsScalerService( threadPool, clusterService, client, inferenceAuditor, telemetryProvider.getMeterRegistry(), + machineLearningExtension.get().isNlpEnabled() + ); + + MlInitializationService mlInitializationService = new MlInitializationService( + settings, + threadPool, + clusterService, + client, + adaptiveAllocationsScalerService, mlAssignmentNotifier, machineLearningExtension.get().isAnomalyDetectionEnabled(), machineLearningExtension.get().isDataFrameAnalyticsEnabled(), @@ -1317,6 +1326,7 @@ public Collection createComponents(PluginServices services) { jobManagerHolder, autodetectProcessManager, mlInitializationService, + adaptiveAllocationsScalerService, jobDataCountsPersister, datafeedRunner, datafeedManager, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java index 98dfb13d9e3e4..45a71a80de077 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java @@ -30,11 +30,9 @@ import 
org.elasticsearch.common.component.LifecycleListener; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.gateway.GatewayService; -import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ml.annotations.AnnotationIndex; import org.elasticsearch.xpack.ml.inference.adaptiveallocations.AdaptiveAllocationsScalerService; -import org.elasticsearch.xpack.ml.notifications.InferenceAuditor; import java.util.Collections; import java.util.Map; @@ -67,8 +65,7 @@ public final class MlInitializationService implements ClusterStateListener { ThreadPool threadPool, ClusterService clusterService, Client client, - InferenceAuditor inferenceAuditor, - MeterRegistry meterRegistry, + AdaptiveAllocationsScalerService adaptiveAllocationsScalerService, MlAssignmentNotifier mlAssignmentNotifier, boolean isAnomalyDetectionEnabled, boolean isDataFrameAnalyticsEnabled, @@ -88,7 +85,7 @@ public final class MlInitializationService implements ClusterStateListener { isDataFrameAnalyticsEnabled, isNlpEnabled ), - new AdaptiveAllocationsScalerService(threadPool, clusterService, client, inferenceAuditor, meterRegistry, isNlpEnabled), + adaptiveAllocationsScalerService, clusterService ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportExternalInferModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportExternalInferModelAction.java index 545dcfbefecec..5603e9c4dca8d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportExternalInferModelAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportExternalInferModelAction.java @@ -13,6 +13,7 @@ import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.InferModelAction; +import org.elasticsearch.xpack.ml.inference.adaptiveallocations.AdaptiveAllocationsScalerService; import org.elasticsearch.xpack.ml.inference.loadingservice.ModelLoadingService; import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelProvider; @@ -25,7 +26,8 @@ public TransportExternalInferModelAction( Client client, ClusterService clusterService, XPackLicenseState licenseState, - TrainedModelProvider trainedModelProvider + TrainedModelProvider trainedModelProvider, + AdaptiveAllocationsScalerService adaptiveAllocationsScalerService ) { super( InferModelAction.EXTERNAL_NAME, @@ -35,7 +37,8 @@ public TransportExternalInferModelAction( client, clusterService, licenseState, - trainedModelProvider + trainedModelProvider, + adaptiveAllocationsScalerService ); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java index 0c4064348b3f6..b69f8c7d62eb2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java @@ -42,6 +42,7 @@ import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.inference.adaptiveallocations.AdaptiveAllocationsScalerService; import 
org.elasticsearch.xpack.ml.inference.loadingservice.LocalModel; import org.elasticsearch.xpack.ml.inference.loadingservice.ModelLoadingService; import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelProvider; @@ -66,6 +67,7 @@ public class TransportInternalInferModelAction extends HandledTransportAction format("[%s] model deployment not allocated to any node", assignment.getDeploymentId())); - listener.onFailure( - ExceptionsHelper.conflictStatusException("Trained model deployment [" + request.getId() + "] is not allocated to any nodes") - ); + String message = "Trained model deployment [" + request.getId() + "] is not allocated to any nodes"; + boolean starting = adaptiveAllocationsScalerService.maybeStartAllocation(assignment); + if (starting) { + message += "; starting deployment of one allocation"; + } + logger.debug(message); + listener.onFailure(ExceptionsHelper.conflictStatusException(message)); return; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScaler.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScaler.java index 044556d1b30ac..05e7202b8efe9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScaler.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScaler.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.common.Strings; +import org.elasticsearch.core.TimeValue; /** * Processes measured requests counts and inference times and decides whether @@ -21,6 +22,12 @@ public class AdaptiveAllocationsScaler { static final double SCALE_UP_THRESHOLD = 0.9; private static final double SCALE_DOWN_THRESHOLD = 0.85; + /** + * The time interval without any requests that has to pass, before scaling down + * to zero allocations (in case min_allocations = 0). + */ + private static final long SCALE_TO_ZERO_AFTER_NO_REQUESTS_TIME_SECONDS = TimeValue.timeValueMinutes(15).getSeconds(); + /** * If the max_number_of_allocations is not set, use this value for now to prevent scaling up * to high numbers due to possible bugs or unexpected behaviour in the scaler. @@ -33,6 +40,7 @@ public class AdaptiveAllocationsScaler { private final String deploymentId; private final KalmanFilter1d requestRateEstimator; private final KalmanFilter1d inferenceTimeEstimator; + private double timeWithoutRequestsSeconds; private int numberOfAllocations; private int neededNumberOfAllocations; @@ -55,6 +63,7 @@ public class AdaptiveAllocationsScaler { // the number of allocations changes, which is passed explicitly to the estimator. 
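The countdown this hunk introduces is easier to see in isolation. The following is a minimal sketch, not code from the patch: it restates the no-request countdown under the names the patch uses (`timeWithoutRequestsSeconds`, `SCALE_TO_ZERO_AFTER_NO_REQUESTS_TIME_SECONDS`), and the simplified `process`/`shouldScaleToZero` signatures are assumptions for illustration.

```java
// Minimal sketch of the no-request countdown added by this patch. Names mirror
// AdaptiveAllocationsScaler, but this standalone class is illustrative only.
final class ScaleToZeroCountdownSketch {
    // 15 minutes, matching TimeValue.timeValueMinutes(15).getSeconds() in the patch.
    private static final long SCALE_TO_ZERO_AFTER_NO_REQUESTS_TIME_SECONDS = 15 * 60;

    private double timeWithoutRequestsSeconds = 0.0;

    // Called once per measurement interval, mirroring AdaptiveAllocationsScaler#process:
    // any observed request resets the countdown, an idle interval advances it.
    void process(long requestCount, double timeIntervalSeconds) {
        if (requestCount > 0) {
            timeWithoutRequestsSeconds = 0.0;
        } else {
            timeWithoutRequestsSeconds += timeIntervalSeconds;
        }
    }

    // Mirrors the new guard in AdaptiveAllocationsScaler#scale: only scale to zero when
    // the feature flag is enabled and min_allocations is unset or zero.
    boolean shouldScaleToZero(boolean featureFlagEnabled, Integer minNumberOfAllocations) {
        return featureFlagEnabled
            && (minNumberOfAllocations == null || minNumberOfAllocations == 0)
            && timeWithoutRequestsSeconds > SCALE_TO_ZERO_AFTER_NO_REQUESTS_TIME_SECONDS;
    }
}
```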
requestRateEstimator = new KalmanFilter1d(deploymentId + ":rate", 100, true); inferenceTimeEstimator = new KalmanFilter1d(deploymentId + ":time", 100, false); + timeWithoutRequestsSeconds = 0.0; this.numberOfAllocations = numberOfAllocations; neededNumberOfAllocations = numberOfAllocations; minNumberOfAllocations = null; @@ -73,6 +82,11 @@ void setMinMaxNumberOfAllocations(Integer minNumberOfAllocations, Integer maxNum void process(AdaptiveAllocationsScalerService.Stats stats, double timeIntervalSeconds, int numberOfAllocations) { lastMeasuredQueueSize = stats.pendingCount(); + if (stats.requestCount() > 0) { + timeWithoutRequestsSeconds = 0.0; + } else { + timeWithoutRequestsSeconds += timeIntervalSeconds; + } // The request rate (per second) is the request count divided by the time. // Assuming a Poisson process for the requests, the variance in the request @@ -145,7 +159,7 @@ Integer scale() { numberOfAllocations--; } - this.neededNumberOfAllocations = numberOfAllocations; + neededNumberOfAllocations = numberOfAllocations; if (maxNumberOfAllocations == null) { numberOfAllocations = Math.min(numberOfAllocations, MAX_NUMBER_OF_ALLOCATIONS_SAFEGUARD); @@ -156,6 +170,13 @@ Integer scale() { if (maxNumberOfAllocations != null) { numberOfAllocations = Math.min(numberOfAllocations, maxNumberOfAllocations); } + if (ScaleToZeroFeatureFlag.isEnabled() + && (minNumberOfAllocations == null || minNumberOfAllocations == 0) + && timeWithoutRequestsSeconds > SCALE_TO_ZERO_AFTER_NO_REQUESTS_TIME_SECONDS) { + logger.debug("[{}] adaptive allocations scaler: scaling down to zero, because of no requests.", deploymentId); + numberOfAllocations = 0; + neededNumberOfAllocations = 0; + } if (numberOfAllocations != oldNumberOfAllocations) { logger.debug( diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java index bbe90f769818b..775279a6b2553 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java @@ -415,49 +415,60 @@ private void processDeploymentStats(GetDeploymentStatsAction.Response statsRespo if (newNumberOfAllocations > numberOfAllocations.get(deploymentId)) { lastScaleUpTimesMillis.put(deploymentId, now); } - UpdateTrainedModelDeploymentAction.Request updateRequest = new UpdateTrainedModelDeploymentAction.Request(deploymentId); - updateRequest.setNumberOfAllocations(newNumberOfAllocations); - updateRequest.setIsInternal(true); - ClientHelper.executeAsyncWithOrigin( - client, - ClientHelper.ML_ORIGIN, - UpdateTrainedModelDeploymentAction.INSTANCE, - updateRequest, - ActionListener.wrap(updateResponse -> { - logger.info("adaptive allocations scaler: scaled [{}] to [{}] allocations.", deploymentId, newNumberOfAllocations); - threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME) - .execute( - () -> inferenceAuditor.info( - deploymentId, - Strings.format( - "adaptive allocations scaler: scaled [%s] to [%s] allocations.", - deploymentId, - newNumberOfAllocations - ) - ) - ); - }, e -> { - logger.atLevel(Level.WARN) - .withThrowable(e) - .log( - "adaptive allocations scaler: scaling [{}] to [{}] allocations failed.", - deploymentId, - newNumberOfAllocations - ); - 
threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME) - .execute( - () -> inferenceAuditor.warning( - deploymentId, - Strings.format( - "adaptive allocations scaler: scaling [%s] to [%s] allocations failed.", - deploymentId, - newNumberOfAllocations - ) - ) - ); - }) - ); + updateNumberOfAllocations(deploymentId, newNumberOfAllocations); } } } + + public boolean maybeStartAllocation(TrainedModelAssignment assignment) { + if (ScaleToZeroFeatureFlag.isEnabled() + && assignment.getAdaptiveAllocationsSettings() != null + && assignment.getAdaptiveAllocationsSettings().getEnabled() == Boolean.TRUE) { + lastScaleUpTimesMillis.put(assignment.getDeploymentId(), System.currentTimeMillis()); + updateNumberOfAllocations(assignment.getDeploymentId(), 1); + return true; + } + return false; + } + + private void updateNumberOfAllocations(String deploymentId, int numberOfAllocations) { + UpdateTrainedModelDeploymentAction.Request updateRequest = new UpdateTrainedModelDeploymentAction.Request(deploymentId); + updateRequest.setNumberOfAllocations(numberOfAllocations); + updateRequest.setIsInternal(true); + ClientHelper.executeAsyncWithOrigin( + client, + ClientHelper.ML_ORIGIN, + UpdateTrainedModelDeploymentAction.INSTANCE, + updateRequest, + ActionListener.wrap(updateResponse -> { + logger.info("adaptive allocations scaler: scaled [{}] to [{}] allocations.", deploymentId, numberOfAllocations); + threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME) + .execute( + () -> inferenceAuditor.info( + deploymentId, + Strings.format( + "adaptive allocations scaler: scaled [%s] to [%s] allocations.", + deploymentId, + numberOfAllocations + ) + ) + ); + }, e -> { + logger.atLevel(Level.WARN) + .withThrowable(e) + .log("adaptive allocations scaler: scaling [{}] to [{}] allocations failed.", deploymentId, numberOfAllocations); + threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME) + .execute( + () -> inferenceAuditor.warning( + deploymentId, + Strings.format( + "adaptive allocations scaler: scaling [%s] to [%s] allocations failed.", + deploymentId, + numberOfAllocations + ) + ) + ); + }) + ); + } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/ScaleToZeroFeatureFlag.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/ScaleToZeroFeatureFlag.java new file mode 100644 index 0000000000000..072b8c5593c93 --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/ScaleToZeroFeatureFlag.java @@ -0,0 +1,20 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.ml.inference.adaptiveallocations; + +import org.elasticsearch.common.util.FeatureFlag; + +public class ScaleToZeroFeatureFlag { + private ScaleToZeroFeatureFlag() {} + + private static final FeatureFlag FEATURE_FLAG = new FeatureFlag("inference_scale_to_zero"); + + public static boolean isEnabled() { + return FEATURE_FLAG.isEnabled(); + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestDeleteTrainedModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestDeleteTrainedModelAction.java index e010bd67dff75..ad3e752a3ea3e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestDeleteTrainedModelAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestDeleteTrainedModelAction.java @@ -8,8 +8,8 @@ import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -28,11 +28,20 @@ @ServerlessScope(Scope.PUBLIC) public class RestDeleteTrainedModelAction extends BaseRestHandler { + @UpdateForV9 + // one or more routes use ".replaces" with RestApiVersion.V_8 which will require use of REST API compatibility headers to access + // that route in v9. It is unclear if this was intentional for v9, and the code has been updated to ".deprecateAndKeep" which will + // continue to emit deprecation warnings but will not require any special headers to access the API in v9. + // Please review and update the code and tests as needed. The original code remains commented out below for reference.
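The route migration this comment describes is mechanical but easy to misread in diff form. A side-by-side sketch of the two registration styles follows, using the same `Route.builder` API as the patch; the shortened paths stand in for the `BASE_PATH` expressions used in the real routes.

```java
// Sketch only: the two registration styles this patch converts between.
// Old style: the legacy path exists solely for v8-compatible clients.
List<Route> replacesStyle = List.of(
    Route.builder(DELETE, "/_ml/trained_models/{model_id}")
        .replaces(DELETE, "/_ml/inference/{model_id}", RestApiVersion.V_8)
        .build()
);

// New style: the legacy path stays reachable without compatibility headers,
// but every call to it emits a deprecation warning.
List<Route> deprecateAndKeepStyle = List.of(
    new Route(DELETE, "/_ml/trained_models/{model_id}"),
    Route.builder(DELETE, "/_ml/inference/{model_id}")
        .deprecateAndKeep("Use the trained_models API instead.")
        .build()
);
```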
@Override public List<Route> routes() { return List.of( - Route.builder(DELETE, BASE_PATH + "trained_models/{" + TrainedModelConfig.MODEL_ID + "}") - .replaces(DELETE, BASE_PATH + "inference/{" + TrainedModelConfig.MODEL_ID + "}", RestApiVersion.V_8) + // Route.builder(DELETE, BASE_PATH + "trained_models/{" + TrainedModelConfig.MODEL_ID + "}") + // .replaces(DELETE, BASE_PATH + "inference/{" + TrainedModelConfig.MODEL_ID + "}", RestApiVersion.V_8) + // .build() + new Route(DELETE, BASE_PATH + "trained_models/{" + TrainedModelConfig.MODEL_ID + "}"), + Route.builder(DELETE, BASE_PATH + "inference/{" + TrainedModelConfig.MODEL_ID + "}") + .deprecateAndKeep("Use the trained_models API instead.") .build() ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestGetTrainedModelsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestGetTrainedModelsAction.java index ae7b26ebad0e4..dfbe375f0d1fc 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestGetTrainedModelsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestGetTrainedModelsAction.java @@ -11,7 +11,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestRequest; @@ -49,13 +49,24 @@ public class RestGetTrainedModelsAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestGetTrainedModelsAction.class); private static final String INCLUDE_MODEL_DEFINITION = "include_model_definition"; + @UpdateForV9 + // one or more routes use ".replaces" with RestApiVersion.V_8 which will require use of REST API compatibility headers to access + // that route in v9. It is unclear if this was intentional for v9, and the code has been updated to ".deprecateAndKeep" which will + // continue to emit deprecation warnings but will not require any special headers to access the API in v9. + // Please review and update the code and tests as needed. The original code remains commented out below for reference.
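For context on the "REST API compatibility headers" these comments refer to, a client reaching a `.replaces`-removed path in v9 has to opt in explicitly. A sketch with the low-level Java REST client follows; the `client` instance (an `org.elasticsearch.client.RestClient`) and the model id are assumed.

```java
// Sketch: calling the legacy 8.x endpoint through REST API compatibility headers.
Request request = new Request("GET", "/_ml/inference/my-model");
RequestOptions.Builder options = RequestOptions.DEFAULT.toBuilder();
options.addHeader("Accept", "application/vnd.elasticsearch+json;compatible-with=8");
options.addHeader("Content-Type", "application/vnd.elasticsearch+json;compatible-with=8");
request.setOptions(options);
Response response = client.performRequest(request);
```

With the `.deprecateAndKeep` form the same call works without these headers; the response merely carries a deprecation warning.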
@Override public List<Route> routes() { return List.of( - Route.builder(GET, BASE_PATH + "trained_models/{" + TrainedModelConfig.MODEL_ID + "}") - .replaces(GET, BASE_PATH + "inference/{" + TrainedModelConfig.MODEL_ID + "}", RestApiVersion.V_8) + // Route.builder(GET, BASE_PATH + "trained_models/{" + TrainedModelConfig.MODEL_ID + "}") + // .replaces(GET, BASE_PATH + "inference/{" + TrainedModelConfig.MODEL_ID + "}", RestApiVersion.V_8) + // .build(), + // Route.builder(GET, BASE_PATH + "trained_models").replaces(GET, BASE_PATH + "inference", RestApiVersion.V_8).build() + new Route(GET, BASE_PATH + "trained_models/{" + TrainedModelConfig.MODEL_ID + "}"), + Route.builder(GET, BASE_PATH + "inference/{" + TrainedModelConfig.MODEL_ID + "}") + .deprecateAndKeep("Use the trained_models API instead.") .build(), - Route.builder(GET, BASE_PATH + "trained_models").replaces(GET, BASE_PATH + "inference", RestApiVersion.V_8).build() + new Route(GET, BASE_PATH + "trained_models"), + Route.builder(GET, BASE_PATH + "inference").deprecateAndKeep("Use the trained_models API instead.").build() ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestGetTrainedModelsStatsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestGetTrainedModelsStatsAction.java index 9c44728fb75e2..3c192d80f7485 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestGetTrainedModelsStatsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestGetTrainedModelsStatsAction.java @@ -9,7 +9,7 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.Strings; -import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -30,15 +30,26 @@ @ServerlessScope(Scope.PUBLIC) public class RestGetTrainedModelsStatsAction extends BaseRestHandler { + @UpdateForV9 + // one or more routes use ".replaces" with RestApiVersion.V_8 which will require use of REST API compatibility headers to access + // that route in v9. It is unclear if this was intentional for v9, and the code has been updated to ".deprecateAndKeep" which will + // continue to emit deprecation warnings but will not require any special headers to access the API in v9. + // Please review and update the code and tests as needed. The original code remains commented out below for reference.
@Override public List<Route> routes() { return List.of( - Route.builder(GET, BASE_PATH + "trained_models/{" + TrainedModelConfig.MODEL_ID + "}/_stats") - .replaces(GET, BASE_PATH + "inference/{" + TrainedModelConfig.MODEL_ID + "}/_stats", RestApiVersion.V_8) + // Route.builder(GET, BASE_PATH + "trained_models/{" + TrainedModelConfig.MODEL_ID + "}/_stats") + // .replaces(GET, BASE_PATH + "inference/{" + TrainedModelConfig.MODEL_ID + "}/_stats", RestApiVersion.V_8) + // .build(), + // Route.builder(GET, BASE_PATH + "trained_models/_stats") + // .replaces(GET, BASE_PATH + "inference/_stats", RestApiVersion.V_8) + // .build() + new Route(GET, BASE_PATH + "trained_models/{" + TrainedModelConfig.MODEL_ID + "}/_stats"), + Route.builder(GET, BASE_PATH + "inference/{" + TrainedModelConfig.MODEL_ID + "}/_stats") + .deprecateAndKeep("Use the trained_models API instead.") .build(), - Route.builder(GET, BASE_PATH + "trained_models/_stats") - .replaces(GET, BASE_PATH + "inference/_stats", RestApiVersion.V_8) - .build() + new Route(GET, BASE_PATH + "trained_models/_stats"), + Route.builder(GET, BASE_PATH + "inference/_stats").deprecateAndKeep("Use the trained_models API instead.").build() ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestInferTrainedModelDeploymentAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestInferTrainedModelDeploymentAction.java index 61f319a2157c4..7327f7426e00c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestInferTrainedModelDeploymentAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestInferTrainedModelDeploymentAction.java @@ -10,8 +10,8 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.ValidationException; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestCancellableNodeClient; @@ -37,11 +37,29 @@ public String getName() { return "xpack_ml_infer_trained_models_deployment_action"; } + @UpdateForV9 // these routes were ".deprecated" in RestApiVersion.V_8 which will require use of REST API compatibility headers to access + // this API in v9. It is unclear if this was intentional for v9, and the code has been updated to ".deprecateAndKeep" which will + // continue to emit deprecation warnings but will not require any special headers to access the API in v9. + // Please review and update the code and tests as needed. The original code remains commented out below for reference. @Override public List<Route> routes() { return Collections.singletonList( + // Route.builder(POST, PATH) + // .deprecated( + // "[" + // + POST.name() + // + " " + // + PATH + // + "] is deprecated! Use [" + // + POST.name() + // + " " + // + RestInferTrainedModelAction.PATH + // + "] instead.", + // RestApiVersion.V_8 + // ) + // .build() Route.builder(POST, PATH) - .deprecated( + .deprecateAndKeep( "[" + POST.name() + " " @@ -50,8 +68,7 @@ public List<Route> routes() { + POST.name() + " " + RestInferTrainedModelAction.PATH - + "] instead.", - RestApiVersion.V_8 + + "] instead."
) .build() ); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestPutTrainedModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestPutTrainedModelAction.java index e57d5912752d2..13d46b8878679 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestPutTrainedModelAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestPutTrainedModelAction.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.ml.rest.inference; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -27,11 +27,20 @@ @ServerlessScope(Scope.PUBLIC) public class RestPutTrainedModelAction extends BaseRestHandler { + @UpdateForV9 + // one or more routes use ".replaces" with RestApiVersion.V_8 which will require use of REST API compatibility headers to access + // that route in v9. It is unclear if this was intentional for v9, and the code has been updated to ".deprecateAndKeep" which will + // continue to emit deprecation warnings but will not require any special headers to access the API in v9. + // Please review and update the code and tests as needed. The original code remains commented out below for reference. @Override public List<Route> routes() { return List.of( - Route.builder(PUT, BASE_PATH + "trained_models/{" + TrainedModelConfig.MODEL_ID + "}") - .replaces(PUT, BASE_PATH + "inference/{" + TrainedModelConfig.MODEL_ID + "}", RestApiVersion.V_8) + // Route.builder(PUT, BASE_PATH + "trained_models/{" + TrainedModelConfig.MODEL_ID + "}") + // .replaces(PUT, BASE_PATH + "inference/{" + TrainedModelConfig.MODEL_ID + "}", RestApiVersion.V_8) + // .build() + new Route(PUT, BASE_PATH + "trained_models/{" + TrainedModelConfig.MODEL_ID + "}"), + Route.builder(PUT, BASE_PATH + "inference/{" + TrainedModelConfig.MODEL_ID + "}") + .deprecateAndKeep("Use the trained_models API instead.") .build() ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostDataAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostDataAction.java index 3d7b2d392836a..41462b016a60a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostDataAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostDataAction.java @@ -8,6 +8,7 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestStatus; @@ -27,12 +28,17 @@ public class RestPostDataAction extends BaseRestHandler { private static final String DEFAULT_RESET_START = ""; private static final String DEFAULT_RESET_END = ""; + @UpdateForV9 // these routes were ".deprecated" in RestApiVersion.V_8 which will require use of REST API compatibility headers to access + // this API in v9. It is unclear if this was intentional for v9, and the code has been updated to ".deprecateAndKeep" which will + // continue to emit deprecation warnings but will not require any special headers to access the API in v9. + // Please review and update the code and tests as needed.
The original code remains commented out below for reference. @Override public List routes() { final String msg = "Posting data directly to anomaly detection jobs is deprecated, " + "in a future major version it will be compulsory to use a datafeed"; return List.of( - Route.builder(POST, BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/_data").deprecated(msg, RestApiVersion.V_8).build(), + // Route.builder(POST, BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/_data").deprecated(msg, RestApiVersion.V_8).build(), + Route.builder(POST, BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/_data").deprecateAndKeep(msg).build(), Route.builder(POST, PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/_data").deprecated(msg, RestApiVersion.V_7).build() ); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlInitializationServiceTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlInitializationServiceTests.java index a5b9597886e15..80c957ecb7a09 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlInitializationServiceTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlInitializationServiceTests.java @@ -17,11 +17,9 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.DeterministicTaskQueue; -import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.ml.inference.adaptiveallocations.AdaptiveAllocationsScalerService; -import org.elasticsearch.xpack.ml.notifications.InferenceAuditor; import org.junit.Before; import java.util.Map; @@ -40,8 +38,7 @@ public class MlInitializationServiceTests extends ESTestCase { private ThreadPool threadPool; private ClusterService clusterService; private Client client; - private InferenceAuditor inferenceAuditor; - private MeterRegistry meterRegistry; + private AdaptiveAllocationsScalerService adaptiveAllocationsScalerService; private MlAssignmentNotifier mlAssignmentNotifier; @Before @@ -50,8 +47,7 @@ public void setUpMocks() { threadPool = deterministicTaskQueue.getThreadPool(); clusterService = mock(ClusterService.class); client = mock(Client.class); - inferenceAuditor = mock(InferenceAuditor.class); - meterRegistry = mock(MeterRegistry.class); + adaptiveAllocationsScalerService = mock(AdaptiveAllocationsScalerService.class); mlAssignmentNotifier = mock(MlAssignmentNotifier.class); when(clusterService.getClusterName()).thenReturn(CLUSTER_NAME); @@ -77,8 +73,7 @@ public void testInitialize() { threadPool, clusterService, client, - inferenceAuditor, - meterRegistry, + adaptiveAllocationsScalerService, mlAssignmentNotifier, true, true, @@ -94,8 +89,7 @@ public void testInitialize_noMasterNode() { threadPool, clusterService, client, - inferenceAuditor, - meterRegistry, + adaptiveAllocationsScalerService, mlAssignmentNotifier, true, true, diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerTests.java index 08097357725d0..7d98aaf67a7f3 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerTests.java +++ 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerTests.java @@ -146,4 +146,49 @@ public void testAutoscaling_maxAllocationsSafeguard() { adaptiveAllocationsScaler.setMinMaxNumberOfAllocations(2, 77); assertThat(adaptiveAllocationsScaler.scale(), equalTo(77)); } + + public void testAutoscaling_scaleDownToZeroAllocations() { + assumeTrue("Should only run if adaptive allocations feature flag is enabled", ScaleToZeroFeatureFlag.isEnabled()); + + AdaptiveAllocationsScaler adaptiveAllocationsScaler = new AdaptiveAllocationsScaler("test-deployment", 1); + // 1 hour with 1 request per second, so don't scale. + for (int i = 0; i < 3600; i++) { + adaptiveAllocationsScaler.process(new AdaptiveAllocationsScalerService.Stats(1, 0, 0, 0.05), 1, 1); + assertThat(adaptiveAllocationsScaler.scale(), nullValue()); + } + // 15 minutes with no requests, so don't scale. + for (int i = 0; i < 900; i++) { + adaptiveAllocationsScaler.process(new AdaptiveAllocationsScalerService.Stats(0, 0, 0, 0.05), 1, 1); + assertThat(adaptiveAllocationsScaler.scale(), nullValue()); + } + // 1 second with a request, so don't scale. + adaptiveAllocationsScaler.process(new AdaptiveAllocationsScalerService.Stats(1, 0, 0, 0.05), 1, 1); + assertThat(adaptiveAllocationsScaler.scale(), nullValue()); + // 15 minutes with no requests, so don't scale. + for (int i = 0; i < 900; i++) { + adaptiveAllocationsScaler.process(new AdaptiveAllocationsScalerService.Stats(0, 0, 0, 0.05), 1, 1); + assertThat(adaptiveAllocationsScaler.scale(), nullValue()); + } + // another second with no requests, so scale to zero allocations. + adaptiveAllocationsScaler.process(new AdaptiveAllocationsScalerService.Stats(0, 0, 0, 0.05), 1, 1); + assertThat(adaptiveAllocationsScaler.scale(), equalTo(0)); + // 15 minutes with no requests, so don't scale.
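The positional `Stats` arguments make these scenarios hard to scan, so here is a legend as a sketch: `requestCount()` and `pendingCount()` are the accessors the scaler actually reads in this patch, while the names of the other two components are assumptions for illustration only.

```java
// Illustration only: naming the positional arguments of the Stats tuples used above.
class StatsLegend {
    // "failedCount" is an assumed name for the third component; the last component is
    // the measured inference time in seconds (0.05 throughout these tests).
    record StatsSketch(long requestCount, long pendingCount, long failedCount, double inferenceTimeSeconds) {}

    static final StatsSketch BUSY_INTERVAL = new StatsSketch(1, 0, 0, 0.05); // a request arrived: countdown resets
    static final StatsSketch IDLE_INTERVAL = new StatsSketch(0, 0, 0, 0.05); // no requests: countdown advances
}
```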
+ for (int i = 0; i < 900; i++) { + adaptiveAllocationsScaler.process(new AdaptiveAllocationsScalerService.Stats(0, 0, 0, 0.05), 1, 0); + assertThat(adaptiveAllocationsScaler.scale(), nullValue()); + } + } + + public void testAutoscaling_dontScaleDownToZeroAllocationsWhenMinAllocationsIsSet() { + assumeTrue("Should only run if adaptive allocations feature flag is enabled", ScaleToZeroFeatureFlag.isEnabled()); + + AdaptiveAllocationsScaler adaptiveAllocationsScaler = new AdaptiveAllocationsScaler("test-deployment", 1); + adaptiveAllocationsScaler.setMinMaxNumberOfAllocations(1, null); + + // 1 hour with no requests, so don't scale. + for (int i = 0; i < 3600; i++) { + adaptiveAllocationsScaler.process(new AdaptiveAllocationsScalerService.Stats(0, 0, 0, 0.05), 1, 1); + assertThat(adaptiveAllocationsScaler.scale(), nullValue()); + } + } } diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringBulkActionTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringBulkActionTests.java index cd05c9bf0d754..33470a35486a1 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringBulkActionTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringBulkActionTests.java @@ -126,10 +126,7 @@ public void testExecuteWithGlobalBlock() throws Exception { final MonitoringBulkRequest request = randomRequest(); assertThat( - asInstanceOf( - ClusterBlockException.class, - safeAwaitFailure(MonitoringBulkResponse.class, l -> action.execute(null, request, l)) - ), + safeAwaitFailure(ClusterBlockException.class, MonitoringBulkResponse.class, l -> action.execute(null, request, l)), hasToString(containsString("ClusterBlockException: blocked by: [SERVICE_UNAVAILABLE/2/no master]")) ); } @@ -175,9 +172,10 @@ public void testExecuteEmptyRequest() { ); assertThat( - asInstanceOf( + safeAwaitFailure( ActionRequestValidationException.class, - safeAwaitFailure(MonitoringBulkResponse.class, l -> action.execute(null, new MonitoringBulkRequest(), l)) + MonitoringBulkResponse.class, + l -> action.execute(null, new MonitoringBulkRequest(), l) ), hasToString(containsString("no monitoring documents added")) ); diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/plan/TableIdentifier.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/plan/TableIdentifier.java index 188bd4cce9c13..ad3322ce4501d 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/plan/TableIdentifier.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/plan/TableIdentifier.java @@ -10,6 +10,8 @@ import java.util.Objects; +import static org.elasticsearch.transport.RemoteClusterAware.REMOTE_CLUSTER_INDEX_SEPARATOR; + public class TableIdentifier { private final Source source; @@ -55,7 +57,7 @@ public Source source() { } public String qualifiedIndex() { - return cluster != null ? cluster + ":" + index : index; + return cluster != null ?
cluster + REMOTE_CLUSTER_INDEX_SEPARATOR + index : index; } @Override @@ -63,7 +65,7 @@ public String toString() { StringBuilder builder = new StringBuilder(); if (cluster != null) { builder.append(cluster); - builder.append(":"); + builder.append(REMOTE_CLUSTER_INDEX_SEPARATOR); } builder.append(index); return builder.toString(); diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/StringUtils.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/StringUtils.java index dad3c8574dc4a..f03e3a111d189 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/StringUtils.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/StringUtils.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.core.Tuple; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; @@ -27,7 +28,6 @@ import java.util.StringJoiner; import static java.util.stream.Collectors.toList; -import static org.elasticsearch.transport.RemoteClusterAware.REMOTE_CLUSTER_INDEX_SEPARATOR; import static org.elasticsearch.transport.RemoteClusterAware.buildRemoteIndexName; import static org.elasticsearch.xpack.ql.util.NumericUtils.isUnsignedLong; @@ -375,10 +375,8 @@ public static String ordinal(int i) { } public static Tuple splitQualifiedIndex(String indexName) { - int separatorOffset = indexName.indexOf(REMOTE_CLUSTER_INDEX_SEPARATOR); - return separatorOffset > 0 - ? Tuple.tuple(indexName.substring(0, separatorOffset), indexName.substring(separatorOffset + 1)) - : Tuple.tuple(null, indexName); + String[] split = RemoteClusterAware.splitIndexName(indexName); + return Tuple.tuple(split[0], split[1]); } public static String qualifyAndJoinIndices(String cluster, String[] indices) { @@ -390,6 +388,6 @@ public static String qualifyAndJoinIndices(String cluster, String[] indices) { } public static boolean isQualified(String indexWildcard) { - return indexWildcard.indexOf(REMOTE_CLUSTER_INDEX_SEPARATOR) > 0; + return RemoteClusterAware.isRemoteIndexName(indexWildcard); } } diff --git a/x-pack/plugin/redact/src/main/java/org/elasticsearch/xpack/redact/RedactProcessor.java b/x-pack/plugin/redact/src/main/java/org/elasticsearch/xpack/redact/RedactProcessor.java index 04a423c7ea330..187126fb31e3e 100644 --- a/x-pack/plugin/redact/src/main/java/org/elasticsearch/xpack/redact/RedactProcessor.java +++ b/x-pack/plugin/redact/src/main/java/org/elasticsearch/xpack/redact/RedactProcessor.java @@ -55,6 +55,12 @@ public class RedactProcessor extends AbstractProcessor { private static final String DEFAULT_REDACTED_START = "<"; private static final String DEFAULT_REDACTED_END = ">"; + protected static final String REDACT_KEY = "_redact"; + protected static final String IS_REDACTED_KEY = "_is_redacted"; + protected static final String METADATA_PATH_REDACT = IngestDocument.INGEST_KEY + "." + REDACT_KEY; + // indicates if document has been redacted, path: _ingest._redact._is_redacted + protected static final String METADATA_PATH_REDACT_IS_REDACTED = METADATA_PATH_REDACT + "." 
+ IS_REDACTED_KEY; + private final String redactField; private final List groks; private final boolean ignoreMissing; @@ -65,6 +71,8 @@ public class RedactProcessor extends AbstractProcessor { private final XPackLicenseState licenseState; private final boolean skipIfUnlicensed; + private final boolean traceRedact; + RedactProcessor( String tag, String description, @@ -76,7 +84,8 @@ public class RedactProcessor extends AbstractProcessor { String redactedEndToken, MatcherWatchdog matcherWatchdog, XPackLicenseState licenseState, - boolean skipIfUnlicensed + boolean skipIfUnlicensed, + boolean traceRedact ) { super(tag, description); this.redactField = redactField; @@ -94,6 +103,7 @@ public class RedactProcessor extends AbstractProcessor { } this.licenseState = licenseState; this.skipIfUnlicensed = skipIfUnlicensed; + this.traceRedact = traceRedact; } @Override @@ -128,6 +138,8 @@ public IngestDocument execute(IngestDocument ingestDocument) { try { String redacted = matchRedact(fieldValue, groks, redactedStartToken, redactedEndToken); ingestDocument.setFieldValue(redactField, redacted); + updateMetadataIfNecessary(ingestDocument, fieldValue, redacted); + return ingestDocument; } catch (RuntimeException e) { // grok throws a RuntimeException when the watchdog interrupts the match @@ -203,6 +215,21 @@ private static void matchRepeat(Grok grok, byte[] utf8Bytes, RegionTrackingMatch } while (offset != length); } + private void updateMetadataIfNecessary(IngestDocument ingestDocument, String fieldValue, String redacted) { + if (traceRedact == false || fieldValue == null) { + return; + } + + Boolean isRedactedMetadata = ingestDocument.getFieldValue(METADATA_PATH_REDACT_IS_REDACTED, Boolean.class, true); + boolean alreadyRedacted = Boolean.TRUE.equals(isRedactedMetadata); + boolean isRedacted = fieldValue.equals(redacted) == false; + + // document newly redacted + if (alreadyRedacted == false && isRedacted) { + ingestDocument.setFieldValue(METADATA_PATH_REDACT_IS_REDACTED, true); + } + } + /** * A Grok capture extractor which tracks matched regions * and the Grok pattern name for redaction later. 
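Putting the pieces of the `trace_redact` change together: the factory reads the flag (next hunk) and `updateMetadataIfNecessary` above sets `_ingest._redact._is_redacted` only when a redaction actually occurred, and never unsets it once set. A rough end-to-end sketch, modeled on the processor tests further below; the `factory` and `ingestDocument` values are assumed to be in scope.

```java
// Sketch of the trace_redact flow, modeled on RedactProcessorTests in this patch.
Map<String, Object> config = new HashMap<>();
config.put("field", "to_redact");
config.put("patterns", List.of("%{EMAILADDRESS:REDACTED}"));
config.put("trace_redact", true); // opt in to redaction tracing

RedactProcessor processor = factory.create(null, "tag", "description", config);
IngestDocument doc = processor.execute(ingestDocument);

// Present and true only if this (or an earlier) processor changed the field value:
Boolean isRedacted = doc.getFieldValue("_ingest._redact._is_redacted", Boolean.class, true);
```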
@@ -389,6 +416,8 @@ public RedactProcessor create( String redactStart = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "prefix", DEFAULT_REDACTED_START); String redactEnd = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "suffix", DEFAULT_REDACTED_END); + boolean traceRedact = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "trace_redact", false); + if (matchPatterns == null || matchPatterns.isEmpty()) { throw newConfigurationException(TYPE, processorTag, "patterns", "List of patterns must not be empty"); } @@ -406,7 +435,8 @@ public RedactProcessor create( redactEnd, matcherWatchdog, licenseState, - skipIfUnlicensed + skipIfUnlicensed, + traceRedact ); } catch (Exception e) { throw newConfigurationException( diff --git a/x-pack/plugin/redact/src/test/java/org/elasticsearch/xpack/redact/RedactProcessorFactoryTests.java b/x-pack/plugin/redact/src/test/java/org/elasticsearch/xpack/redact/RedactProcessorFactoryTests.java index 376e7caa8137d..affcc72614aa8 100644 --- a/x-pack/plugin/redact/src/test/java/org/elasticsearch/xpack/redact/RedactProcessorFactoryTests.java +++ b/x-pack/plugin/redact/src/test/java/org/elasticsearch/xpack/redact/RedactProcessorFactoryTests.java @@ -68,6 +68,7 @@ public void testConfigKeysRemoved() throws Exception { config.put("patterns", List.of("%{MY_PATTERN:name}!")); config.put("pattern_definitions", Map.of("MY_PATTERN", "foo")); config.put("ignore_missing", true); + config.put("trace_redact", true); config.put("extra", "unused"); factory.create(null, null, null, config); diff --git a/x-pack/plugin/redact/src/test/java/org/elasticsearch/xpack/redact/RedactProcessorTests.java b/x-pack/plugin/redact/src/test/java/org/elasticsearch/xpack/redact/RedactProcessorTests.java index a775adb7a4c15..3f44957201ef0 100644 --- a/x-pack/plugin/redact/src/test/java/org/elasticsearch/xpack/redact/RedactProcessorTests.java +++ b/x-pack/plugin/redact/src/test/java/org/elasticsearch/xpack/redact/RedactProcessorTests.java @@ -259,7 +259,8 @@ public void testLicenseChecks() throws Exception { ">", MatcherWatchdog.noop(), notAllowed, - false // set skip_if_unlicensed to false, we do not want to skip, we do want to fail + false, // set skip_if_unlicensed to false, we do not want to skip, we do want to fail + false ); assertThat(processor.getSkipIfUnlicensed(), equalTo(false)); var ingestDoc = createIngestDoc(Map.of("not_the_field", "fieldValue")); @@ -314,6 +315,118 @@ public void testLicenseChanges() throws Exception { } } + @SuppressWarnings("unchecked") + public void testTraceRedact() throws Exception { + var config = new HashMap<String, Object>(); + config.put("field", "to_redact"); + config.put("patterns", List.of("%{EMAILADDRESS:REDACTED}")); + config.put("trace_redact", true); + { + var processor = new RedactProcessor.Factory(mockLicenseState(), MatcherWatchdog.noop()).create( + null, + "t", + "d", + new HashMap<>(config) + ); + var message = "this should not be redacted"; + var ingestDoc = createIngestDoc(Map.of("to_redact", message)); + var redactedDoc = processor.execute(ingestDoc); + + assertEquals(message, redactedDoc.getFieldValue("to_redact", String.class)); + assertNull(redactedDoc.getFieldValue(RedactProcessor.METADATA_PATH_REDACT_IS_REDACTED, Boolean.class, true)); + } + { + var processor = new RedactProcessor.Factory(mockLicenseState(), MatcherWatchdog.noop()).create( + null, + "t", + "d", + new HashMap<>(config) + ); + var ingestDoc = createIngestDoc(Map.of("to_redact", "thisisanemail@address.com will be redacted")); + var redactedDoc = processor.execute(ingestDoc); + + assertEquals("<REDACTED> will be redacted", redactedDoc.getFieldValue("to_redact", String.class)); + // validate ingest metadata path correctly resolved + assertTrue(redactedDoc.getFieldValue(RedactProcessor.METADATA_PATH_REDACT_IS_REDACTED, Boolean.class)); + // validate ingest metadata structure correct + var ingestMeta = redactedDoc.getIngestMetadata(); + assertTrue(ingestMeta.containsKey(RedactProcessor.REDACT_KEY)); + var redactMetadata = (HashMap<String, Object>) ingestMeta.get(RedactProcessor.REDACT_KEY); + assertTrue(redactMetadata.containsKey(RedactProcessor.IS_REDACTED_KEY)); + assertTrue((Boolean) redactMetadata.get(RedactProcessor.IS_REDACTED_KEY)); + } + { + var configNoTrace = new HashMap<String, Object>(); + configNoTrace.put("field", "to_redact"); + configNoTrace.put("patterns", List.of("%{EMAILADDRESS:REDACTED}")); + + var processor = new RedactProcessor.Factory(mockLicenseState(), MatcherWatchdog.noop()).create(null, "t", "d", configNoTrace); + var ingestDoc = createIngestDoc(Map.of("to_redact", "thisisanemail@address.com will be redacted")); + var redactedDoc = processor.execute(ingestDoc); + + assertEquals("<REDACTED> will be redacted", redactedDoc.getFieldValue("to_redact", String.class)); + assertNull(redactedDoc.getFieldValue(RedactProcessor.METADATA_PATH_REDACT_IS_REDACTED, Boolean.class, true)); + } + } + + public void testTraceRedactMultipleProcessors() throws Exception { + var configRedact = new HashMap<String, Object>(); + configRedact.put("field", "to_redact"); + configRedact.put("patterns", List.of("%{EMAILADDRESS:REDACTED}")); + configRedact.put("trace_redact", true); + + var configNoRedact = new HashMap<String, Object>(); + configNoRedact.put("field", "to_redact"); + configNoRedact.put("patterns", List.of("%{IP:REDACTED}")); // not in the doc + configNoRedact.put("trace_redact", true); + + // first processor does not redact doc, second one does + { + var processorRedact = new RedactProcessor.Factory(mockLicenseState(), MatcherWatchdog.noop()).create( + null, + "t1", + "d", + new HashMap<>(configRedact) + ); + var processorNoRedact = new RedactProcessor.Factory(mockLicenseState(), MatcherWatchdog.noop()).create( + null, + "t2", + "d", + new HashMap<>(configNoRedact) + ); + var ingestDocWithEmail = createIngestDoc(Map.of("to_redact", "thisisanemail@address.com will be redacted")); + + var docNotRedacted = processorNoRedact.execute(ingestDocWithEmail); + assertNull(docNotRedacted.getFieldValue(RedactProcessor.METADATA_PATH_REDACT_IS_REDACTED, Boolean.class, true)); + + var docRedacted = processorRedact.execute(docNotRedacted); + assertTrue(docRedacted.getFieldValue(RedactProcessor.METADATA_PATH_REDACT_IS_REDACTED, Boolean.class)); + } + // first processor redacts doc, second one does not + { + var processorRedact = new RedactProcessor.Factory(mockLicenseState(), MatcherWatchdog.noop()).create( + null, + "t1", + "d", + new HashMap<>(configRedact) + ); + var processorNoRedact = new RedactProcessor.Factory(mockLicenseState(), MatcherWatchdog.noop()).create( + null, + "t2", + "d", + new HashMap<>(configNoRedact) + ); + var ingestDocWithEmail = createIngestDoc(Map.of("to_redact", "thisisanemail@address.com will be redacted")); + + var docRedacted = processorRedact.execute(ingestDocWithEmail); + assertTrue(docRedacted.getFieldValue(RedactProcessor.METADATA_PATH_REDACT_IS_REDACTED, Boolean.class)); + + // validate does not override already redacted doc metadata + var docRedactedAlready = processorNoRedact.execute(docRedacted); +
assertTrue(docRedactedAlready.getFieldValue(RedactProcessor.METADATA_PATH_REDACT_IS_REDACTED, Boolean.class)); + } + } + public void testMergeLongestRegion() { var r = List.of( new RedactProcessor.RegionTrackingMatchExtractor.Replacement(10, 20, "first"), diff --git a/x-pack/plugin/security/qa/audit/src/javaRestTest/java/org/elasticsearch/xpack/security/audit/AuditIT.java b/x-pack/plugin/security/qa/audit/src/javaRestTest/java/org/elasticsearch/xpack/security/audit/AuditIT.java index e6af9c634e72f..2c329db5e3b50 100644 --- a/x-pack/plugin/security/qa/audit/src/javaRestTest/java/org/elasticsearch/xpack/security/audit/AuditIT.java +++ b/x-pack/plugin/security/qa/audit/src/javaRestTest/java/org/elasticsearch/xpack/security/audit/AuditIT.java @@ -8,9 +8,7 @@ package org.elasticsearch.xpack.security.audit; import org.elasticsearch.client.Request; -import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; -import org.elasticsearch.client.WarningsHandler; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.settings.SecureString; @@ -81,14 +79,14 @@ protected String getTestRestCluster() { } public void testAuditAuthenticationSuccess() throws Exception { - final Request request = new Request("GET", randomFrom("/_security/_authenticate", "/_xpack/security/_authenticate")); + final Request request = new Request("GET", "/_security/_authenticate"); executeAndVerifyAudit(request, AuditLevel.AUTHENTICATION_SUCCESS, event -> { assertThat(event, hasEntry(LoggingAuditTrail.AUTHENTICATION_TYPE_FIELD_NAME, "REALM")); }); } public void testAuditAuthenticationFailure() throws Exception { - final Request request = new Request("GET", randomFrom("/_security/_authenticate", "/_xpack/security/_authenticate")); + final Request request = new Request("GET", "/_security/_authenticate"); String basicAuth = basicAuthHeaderValue(API_USER, new SecureString(new char[0])); request.setOptions(request.getOptions().toBuilder().addHeader("Authorization", basicAuth).addParameter("ignore", "401")); executeAndVerifyAudit(request, AuditLevel.AUTHENTICATION_FAILED, event -> {}); @@ -96,7 +94,7 @@ public void testAuditAuthenticationFailure() throws Exception { public void testFilteringOfRequestBodies() throws Exception { final String username = randomAlphaOfLength(4) + randomIntBetween(100, 999); - final Request request = new Request(randomFrom("PUT", "POST"), randomFrom("/_security/user/", "/_xpack/security/user/") + username); + final Request request = new Request(randomFrom("PUT", "POST"), "/_security/user/" + username); final String password = randomAlphaOfLength(4) + randomIntBetween(10, 99) + randomAlphaOfLength(4); request.setJsonEntity("{ \"password\":\"" + password + "\", \"roles\":[\"superuser\"] }"); executeAndVerifyAudit(request, AuditLevel.AUTHENTICATION_SUCCESS, event -> { @@ -141,15 +139,6 @@ private void executeAndVerifyAudit(Request request, AuditLevel eventType, Checke } private static Response executeRequest(Request request) throws IOException { - if (request.getEndpoint().startsWith("/_xpack/security/")) { - final RequestOptions options = request.getOptions() - .toBuilder() - .addHeader("Content-Type", "application/json; compatible-with=7") - .addHeader("Accept", "application/json; compatible-with=7") - .setWarningsHandler(WarningsHandler.PERMISSIVE) - .build(); - request.setOptions(options); - } return client().performRequest(request); } diff --git 
a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerIntegTests.java index 32337f0d66896..44cbf03f220a1 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerIntegTests.java @@ -26,18 +26,24 @@ import org.elasticsearch.xpack.core.security.action.user.PutUserResponse; import org.elasticsearch.xpack.security.authz.store.NativePrivilegeStore; import org.hamcrest.Matchers; +import org.junit.After; import org.junit.Before; import java.util.ArrayList; import java.util.List; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CyclicBarrier; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_MAIN_ALIAS; import static org.hamcrest.Matchers.arrayContaining; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; @@ -45,6 +51,14 @@ public class SecurityIndexManagerIntegTests extends SecurityIntegTestCase { + private final int concurrentCallsToOnAvailable = 6; + private final ExecutorService executor = Executors.newFixedThreadPool(concurrentCallsToOnAvailable); + + @After + public void shutdownExecutor() { + executor.shutdown(); + } + public void testConcurrentOperationsTryingToCreateSecurityIndexAndAlias() throws Exception { final int processors = Runtime.getRuntime().availableProcessors(); final int numThreads = Math.min(50, scaledRandomIntBetween((processors + 1) / 2, 4 * processors)); // up to 50 threads @@ -110,6 +124,12 @@ public void testOnIndexAvailableForSearchIndexCompletesWithinTimeout() throws Ex // pick longer wait than in the assertBusy that waits for below to ensure index has had enough time to initialize securityIndexManager.onIndexAvailableForSearch((ActionListener<Void>) future, TimeValue.timeValueSeconds(40)); + // check listener added + assertThat( + securityIndexManager.getStateChangeListeners(), + hasItem(instanceOf(SecurityIndexManager.StateConsumerWithCancellable.class)) + ); + createSecurityIndexWithWaitForActiveShards(); assertBusy( @@ -121,6 +141,12 @@ public void testOnIndexAvailableForSearchIndexCompletesWithinTimeout() throws Ex // security index creation is complete and index is available for search; therefore whenIndexAvailableForSearch should report // success in time future.actionGet(); + + // check no remaining listeners + assertThat( + securityIndexManager.getStateChangeListeners(), + not(hasItem(instanceOf(SecurityIndexManager.StateConsumerWithCancellable.class))) + ); } @SuppressWarnings("unchecked") @@ -152,6 +178,69 @@ public void testOnIndexAvailableForSearchIndexAlreadyAvailable() throws Exceptio securityIndexManager.onIndexAvailableForSearch((ActionListener<Void>) future, TimeValue.timeValueSeconds(10)); future.actionGet(); } + + // check no remaining listeners + assertThat( + securityIndexManager.getStateChangeListeners(), + not(hasItem(instanceOf(SecurityIndexManager.StateConsumerWithCancellable.class))) + ); + } + + @SuppressWarnings("unchecked") + public void testOnIndexAvailableForSearchIndexUnderConcurrentLoad() throws Exception { + final SecurityIndexManager securityIndexManager = internalCluster().getInstances(NativePrivilegeStore.class) + .iterator() + .next() + .getSecurityIndexManager(); + // Long time out calls should all succeed + final List<Future<Void>> futures = new ArrayList<>(); + for (int i = 0; i < concurrentCallsToOnAvailable / 2; i++) { + final Future<Void> future = executor.submit(() -> { + try { + final ActionFuture<Void> f = new PlainActionFuture<>(); + securityIndexManager.onIndexAvailableForSearch((ActionListener<Void>) f, TimeValue.timeValueSeconds(40)); + f.actionGet(); + } catch (Exception ex) { + fail(ex, "should not have encountered exception"); + } + return null; + }); + futures.add(future); + } + + // short time-out tasks should all time out + for (int i = 0; i < concurrentCallsToOnAvailable / 2; i++) { + final Future<Void> future = executor.submit(() -> { + expectThrows(ElasticsearchTimeoutException.class, () -> { + final ActionFuture<Void> f = new PlainActionFuture<>(); + securityIndexManager.onIndexAvailableForSearch((ActionListener<Void>) f, TimeValue.timeValueMillis(10)); + f.actionGet(); + }); + return null; + }); + futures.add(future); + } + + // Sleep a second for short-running calls to timeout + Thread.sleep(1000); + + createSecurityIndexWithWaitForActiveShards(); + // ensure security index manager state is fully in the expected precondition state for this test (ready for search) + assertBusy( + () -> assertThat(securityIndexManager.isAvailable(SecurityIndexManager.Availability.SEARCH_SHARDS), is(true)), + 30, + TimeUnit.SECONDS + ); + + for (var future : futures) { + future.get(10, TimeUnit.SECONDS); + } + + // check no remaining listeners + assertThat( + securityIndexManager.getStateChangeListeners(), + not(hasItem(instanceOf(SecurityIndexManager.StateConsumerWithCancellable.class))) + ); } @SuppressWarnings("unchecked") @@ -163,9 +252,24 @@ public void testOnIndexAvailableForSearchIndexWaitTimeOut() { .next() .getSecurityIndexManager(); - final ActionFuture<Void> future = new PlainActionFuture<>(); - securityIndexManager.onIndexAvailableForSearch((ActionListener<Void>) future, TimeValue.timeValueMillis(100)); - expectThrows(ElasticsearchTimeoutException.class, future::actionGet); + { + final ActionFuture<Void> future = new PlainActionFuture<>(); + securityIndexManager.onIndexAvailableForSearch((ActionListener<Void>) future, TimeValue.timeValueMillis(100)); + expectThrows(ElasticsearchTimeoutException.class, future::actionGet); + } + + // Also works with 0 timeout + { + final ActionFuture<Void> future = new PlainActionFuture<>(); + securityIndexManager.onIndexAvailableForSearch((ActionListener<Void>) future, TimeValue.timeValueMillis(0)); + expectThrows(ElasticsearchTimeoutException.class, future::actionGet); + } + + // check no remaining listeners + assertThat( + securityIndexManager.getStateChangeListeners(), + not(hasItem(instanceOf(SecurityIndexManager.StateConsumerWithCancellable.class))) + ); } public void testSecurityIndexSettingsCannotBeChanged() throws Exception { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java index 42a1d89a9aa00..d5cbbe8b349a7 100644 ---
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java @@ -169,11 +169,9 @@ ResolvedIndices resolveIndicesAndAliasesWithoutWildcards(String action, IndicesR // and no remote clusters are configured that match it if (split.getLocal().isEmpty() && split.getRemote().isEmpty()) { for (String indexExpression : indices) { - String[] clusterAndIndex = indexExpression.split(":", 2); - if (clusterAndIndex.length == 2) { - if (clusterAndIndex[0].contains("*")) { - throw new NoSuchRemoteClusterException(clusterAndIndex[0]); - } + String[] clusterAndIndex = RemoteClusterAware.splitIndexName(indexExpression); + if (clusterAndIndex[0] != null && clusterAndIndex[0].contains("*")) { + throw new NoSuchRemoteClusterException(clusterAndIndex[0]); } } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/SearchRequestCacheDisablingInterceptor.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/SearchRequestCacheDisablingInterceptor.java index d10057ec7e740..d8ec078507bfe 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/SearchRequestCacheDisablingInterceptor.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/SearchRequestCacheDisablingInterceptor.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.transport.TransportActionProxy; import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine; import org.elasticsearch.xpack.core.security.authz.AuthorizationServiceField; @@ -18,7 +19,6 @@ import java.util.Arrays; -import static org.elasticsearch.transport.RemoteClusterAware.REMOTE_CLUSTER_INDEX_SEPARATOR; import static org.elasticsearch.xpack.core.security.SecurityField.DOCUMENT_LEVEL_SECURITY_FEATURE; import static org.elasticsearch.xpack.core.security.SecurityField.FIELD_LEVEL_SECURITY_FEATURE; @@ -55,6 +55,6 @@ && hasRemoteIndices(searchRequest) // package private for test static boolean hasRemoteIndices(SearchRequest request) { - return Arrays.stream(request.indices()).anyMatch(name -> name.indexOf(REMOTE_CLUSTER_INDEX_SEPARATOR) >= 0); + return Arrays.stream(request.indices()).anyMatch(RemoteClusterAware::isRemoteIndexName); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java index a6c8de003c159..6d9b0ef6aeebe 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java @@ -54,6 +54,7 @@ import java.util.Objects; import java.util.Set; import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.BiConsumer; import java.util.function.Consumer; import java.util.stream.Collectors; @@ -364,45 +365,80 @@ public void accept(State previousState, State nextState) { * Notifies {@code listener} once the security index is available, or calls {@code 
onFailure} on {@code timeout}. */ public void onIndexAvailableForSearch(ActionListener<Void> listener, TimeValue timeout) { - logger.info("Will wait for security index [{}] to become available for search", getConcreteIndexName()); + logger.info("Will wait for security index [{}] for [{}] to become available for search", getConcreteIndexName(), timeout); - final ActionListener<Void> notifyOnceListener = ActionListener.notifyOnce(listener); + if (state.indexAvailableForSearch) { + logger.debug("Security index [{}] is already available", getConcreteIndexName()); + listener.onResponse(null); + return; + } + final AtomicBoolean isDone = new AtomicBoolean(false); final var indexAvailableForSearchListener = new StateConsumerWithCancellable() { @Override public void accept(SecurityIndexManager.State previousState, SecurityIndexManager.State nextState) { if (nextState.indexAvailableForSearch) { - assert cancellable != null; - // cancel and removeStateListener are idempotent - cancellable.cancel(); - removeStateListener(this); - notifyOnceListener.onResponse(null); + if (isDone.compareAndSet(false, true)) { + cancel(); + removeStateListener(this); + listener.onResponse(null); + } } } }; + // add listener _before_ registering timeout -- this way we are guaranteed it gets removed (either by timeout below, or successful + // completion above) + addStateListener(indexAvailableForSearchListener); + // schedule failure handling on timeout -- keep reference to cancellable so a successful completion can cancel the timeout - indexAvailableForSearchListener.cancellable = client.threadPool().schedule(() -> { - removeStateListener(indexAvailableForSearchListener); - notifyOnceListener.onFailure( - new ElasticsearchTimeoutException( - "timed out waiting for security index [" + getConcreteIndexName() + "] to become available for search" - ) - ); - }, timeout, client.threadPool().generic()); + indexAvailableForSearchListener.setCancellable(client.threadPool().schedule(() -> { + if (isDone.compareAndSet(false, true)) { + removeStateListener(indexAvailableForSearchListener); + listener.onFailure( + new ElasticsearchTimeoutException( + "timed out waiting for security index [" + getConcreteIndexName() + "] to become available for search" + ) + ); + } + }, timeout, client.threadPool().generic())); + } - // in case the state has meanwhile changed to available, return immediately - if (state.indexAvailableForSearch) { - indexAvailableForSearchListener.cancellable.cancel(); - notifyOnceListener.onResponse(null); - } else { - addStateListener(indexAvailableForSearchListener); - } + // pkg-private for testing + List<BiConsumer<State, State>> getStateChangeListeners() { + return stateChangeListeners; + } - private abstract static class StateConsumerWithCancellable + /** + * This class ensures that if cancel() is called _before_ setCancellable(), the passed-in cancellable is still correctly cancelled on + * a subsequent setCancellable() call.
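+ * (Editor's note, illustrative and not part of the change: the race this guards against is that the state listener above is registered + * before the timeout is scheduled, so a state change can invoke cancel() while the cancellable field is still null; recording that in + * the cancelled flag lets the subsequent setCancellable() call cancel the just-scheduled timeout task immediately.)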
+ */ + // pkg-private for testing + abstract static class StateConsumerWithCancellable implements - BiConsumer<SecurityIndexManager.State, SecurityIndexManager.State> { volatile Scheduler.ScheduledCancellable cancellable; + BiConsumer<SecurityIndexManager.State, SecurityIndexManager.State>, + Scheduler.Cancellable { + private volatile Scheduler.ScheduledCancellable cancellable; + private volatile boolean cancelled = false; + + void setCancellable(Scheduler.ScheduledCancellable cancellable) { + this.cancellable = cancellable; + if (cancelled) { + cancel(); + } + } + + public boolean cancel() { + cancelled = true; + if (cancellable != null) { + // cancellable is idempotent, so it's fine to potentially call it multiple times + return cancellable.cancel(); + } + return isCancelled(); + } + + public boolean isCancelled() { + return cancelled; + } } private Tuple checkIndexAvailable(ClusterState state) { diff --git a/x-pack/plugin/snapshot-based-recoveries/src/internalClusterTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/SnapshotBasedIndexRecoveryIT.java b/x-pack/plugin/snapshot-based-recoveries/src/internalClusterTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/SnapshotBasedIndexRecoveryIT.java index d1eaff1bef1b2..df8dc54bb7490 100644 --- a/x-pack/plugin/snapshot-based-recoveries/src/internalClusterTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/SnapshotBasedIndexRecoveryIT.java +++ b/x-pack/plugin/snapshot-based-recoveries/src/internalClusterTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/SnapshotBasedIndexRecoveryIT.java @@ -623,7 +623,7 @@ public void testRecoveryIsCancelledAfterDeletingTheIndex() throws Exception { assertAcked(indicesAdmin().prepareDelete(indexName).get()); - assertBusy(mockLog::assertAllExpectationsMatched); + mockLog.awaitAllExpectationsMatched(); } respondToRecoverSnapshotFile.countDown(); diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/analyze/RepositoryAnalysisFailureIT.java b/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/analyze/RepositoryAnalysisFailureIT.java index 1e9b7f23c60d5..b8acd9808a35e 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/analyze/RepositoryAnalysisFailureIT.java +++ b/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/analyze/RepositoryAnalysisFailureIT.java @@ -480,9 +480,10 @@ public boolean acceptsEmptyRegister() { } private RepositoryVerificationException analyseRepositoryExpectFailure(RepositoryAnalyzeAction.Request request) { - return asInstanceOf( + return safeAwaitAndUnwrapFailure( RepositoryVerificationException.class, - ExceptionsHelper.unwrapCause(safeAwaitFailure(RepositoryAnalyzeAction.Response.class, l -> analyseRepository(request, l))) + RepositoryAnalyzeAction.Response.class, + l -> analyseRepository(request, l) ); } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/70_locale.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/70_locale.yml index 05edf6cdfb5a8..5a9a2a21e21bc 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/70_locale.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/70_locale.yml @@ -29,7 +29,7 @@ setup: - do: allowed_warnings_regex: - "No limit defined, adding default limit of \\[.*\\]" - - "Date format \\[MMMM\\] contains textual field specifiers that could
change in JDK 23" + - "Date format \\[MMMM\\] contains textual field specifiers that could change in JDK 23.*" esql.query: body: query: 'FROM events | eval fixed_format = date_format("MMMM", @timestamp), variable_format = date_format(format, @timestamp) | sort @timestamp | keep @timestamp, fixed_format, variable_format' @@ -51,7 +51,7 @@ setup: - do: allowed_warnings_regex: - "No limit defined, adding default limit of \\[.*\\]" - - "Date format \\[MMMM\\] contains textual field specifiers that could change in JDK 23" + - "Date format \\[MMMM\\] contains textual field specifiers that could change in JDK 23.*" esql.query: body: query: 'FROM events | eval fixed_format = date_format("MMMM", @timestamp), variable_format = date_format(format, @timestamp) | sort @timestamp | keep @timestamp, fixed_format, variable_format' diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/redact/10_redact_processor.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/redact/10_redact_processor.yml index 559d87879faad..e864d191a3ec1 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/redact/10_redact_processor.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/redact/10_redact_processor.yml @@ -24,7 +24,7 @@ index: test id: "1" pipeline: "pipeline-using-a-redact-processor" - body: {to_redact: "0.0.0.1 is my secret IP to redact"} + body: { to_redact: "0.0.0.1 is my secret IP to redact" } - do: get: @@ -96,3 +96,25 @@ } - length: { docs: 1 } - match: { docs.0.doc._source.to_redact: "==*EMAIL*== will be redacted" } +--- +"Test redact with trace_redact": + - do: + ingest.simulate: + body: > + { + "pipeline": { + "processors": [ + { + "redact": { + "field": "to_redact", + "patterns": ["%{EMAILADDRESS:EMAIL}", "%{IP:IP_ADDRESS}"], + "trace_redact": true + } + } + ] + }, + "docs": [{"_source": {"to_redact": "this-email@address.com will be redacted"}}] + } + - length: { docs: 1 } + - match: { docs.0.doc._source.to_redact: "<EMAIL> will be redacted" } + - match: { docs.0.doc._ingest._redact._is_redacted: true } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/wildcard/30_ignore_above_synthetic_source.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/wildcard/30_ignore_above_synthetic_source.yml index f5c9f3d92369a..2e3ba773fb0f2 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/wildcard/30_ignore_above_synthetic_source.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/wildcard/30_ignore_above_synthetic_source.yml @@ -49,7 +49,7 @@ wildcard field type ignore_above: - length: { hits.hits: 1 } - match: { hits.hits.0._source.a_wildcard: "foo bar" } - match: { hits.hits.0._source.b_wildcard: "the quick brown" } - - match: { hits.hits.0._source.c_wildcard: ["bar", "foo"] } + - match: { hits.hits.0._source.c_wildcard: ["bar", "foo", "jumps over the lazy dog"] } - match: { hits.hits.0._source.d_wildcard: ["bar", "foo", "the quick"] } - match: { hits.hits.0.fields.a_wildcard.0: "foo bar" } - match: { hits.hits.0.fields.b_wildcard.0: "the quick brown" } diff --git a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistry.java b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistry.java index 62d22c0c0a9cc..b2dc04c1178e4 100644 --- a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistry.java +++
b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistry.java @@ -51,12 +51,7 @@ public class LegacyStackTemplateRegistry extends IndexTemplateRegistry { private final FeatureService featureService; private volatile boolean stackTemplateEnabled; - private static final Map<String, String> ADDITIONAL_TEMPLATE_VARIABLES = Map.of( - "xpack.stack.template.deprecated", - "true", - "xpack.stack.template.logsdb.index.mode", - "standard" - ); + private static final Map<String, String> ADDITIONAL_TEMPLATE_VARIABLES = Map.of("xpack.stack.template.deprecated", "true"); // General mappings conventions for any data that ends up in a data stream public static final String DATA_STREAMS_MAPPINGS_COMPONENT_TEMPLATE_NAME = "data-streams-mappings"; diff --git a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackPlugin.java b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackPlugin.java index cc127883652af..71d01798323d3 100644 --- a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackPlugin.java +++ b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackPlugin.java @@ -23,7 +23,7 @@ public StackPlugin(Settings settings) { @Override public List<Setting<?>> getSettings() { - return List.of(StackTemplateRegistry.STACK_TEMPLATES_ENABLED, StackTemplateRegistry.CLUSTER_LOGSDB_ENABLED); + return List.of(StackTemplateRegistry.STACK_TEMPLATES_ENABLED); } @Override diff --git a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java index 592842f61eee8..b45f17e434388 100644 --- a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java +++ b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java @@ -18,7 +18,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.features.FeatureService; import org.elasticsearch.features.NodeFeature; -import org.elasticsearch.index.IndexMode; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentParserConfiguration; @@ -36,6 +35,8 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.xpack.cluster.settings.ClusterSettings.CLUSTER_LOGSDB_ENABLED; + public class StackTemplateRegistry extends IndexTemplateRegistry { private static final Logger logger = LogManager.getLogger(StackTemplateRegistry.class); @@ -58,15 +59,6 @@ public class StackTemplateRegistry extends IndexTemplateRegistry { Setting.Property.Dynamic ); - /** - * if index.mode "logsdb" is applied by default in logs@settings for 'logs-*-*' - */ - public static final Setting<Boolean> CLUSTER_LOGSDB_ENABLED = Setting.boolSetting( - "cluster.logsdb.enabled", - false, - Setting.Property.NodeScope - ); - private final ClusterService clusterService; private final FeatureService featureService; private final Map<String, ComponentTemplate> componentTemplateConfigs; @@ -167,15 +159,10 @@ private Map<String, ComponentTemplate> loadComponentTemplateConfigs(boolean logs ), new IndexTemplateConfig( LOGS_SETTINGS_COMPONENT_TEMPLATE_NAME, - "/logs@settings.json", + logsDbEnabled ? "/logs@settings-logsdb.json" : "/logs@settings.json", REGISTRY_VERSION, TEMPLATE_VERSION_VARIABLE, - Map.of( - "xpack.stack.template.deprecated", - "false", - "xpack.stack.template.logsdb.index.mode", - logsDbEnabled ?
IndexMode.LOGSDB.getName() : IndexMode.STANDARD.getName() - ) + Map.of("xpack.stack.template.deprecated", "false") ), new IndexTemplateConfig( METRICS_MAPPINGS_COMPONENT_TEMPLATE_NAME, diff --git a/x-pack/plugin/text-structure/src/main/java/org/elasticsearch/xpack/textstructure/rest/RestFindStructureAction.java b/x-pack/plugin/text-structure/src/main/java/org/elasticsearch/xpack/textstructure/rest/RestFindStructureAction.java index 65325f2268ed2..03b18744eba2a 100644 --- a/x-pack/plugin/text-structure/src/main/java/org/elasticsearch/xpack/textstructure/rest/RestFindStructureAction.java +++ b/x-pack/plugin/text-structure/src/main/java/org/elasticsearch/xpack/textstructure/rest/RestFindStructureAction.java @@ -8,7 +8,7 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -27,10 +27,17 @@ @ServerlessScope(Scope.INTERNAL) public class RestFindStructureAction extends BaseRestHandler { + @UpdateForV9 + // one or more routes use ".replaces" with RestApiVersion.V_8 which will require use of REST API compatibility headers to access + // that route in v9. It is unclear if this was intentional for v9, and the code has been updated to ".deprecateAndKeep" which will + // continue to emit deprecation warnings but will not require any special headers to access the API in v9. + // Please review and update the code and tests as needed. The original code remains commented out below for reference. @Override public List<Route> routes() { return List.of( - Route.builder(POST, BASE_PATH + "find_structure").replaces(POST, "/_ml/find_file_structure", RestApiVersion.V_8).build() + // Route.builder(POST, BASE_PATH + "find_structure").replaces(POST, "/_ml/find_file_structure", RestApiVersion.V_8).build() + new Route(POST, BASE_PATH + "find_structure"), + Route.builder(POST, "/_ml/find_file_structure").deprecateAndKeep("Use the _text_structure API instead.").build() ); } diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/checkpoint/DefaultCheckpointProviderTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/checkpoint/DefaultCheckpointProviderTests.java index 1c38ed50ede39..3df47fb3bc066 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/checkpoint/DefaultCheckpointProviderTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/checkpoint/DefaultCheckpointProviderTests.java @@ -32,6 +32,7 @@ import org.elasticsearch.test.MockLog.LoggingExpectation; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ActionNotFoundTransportException; +import org.elasticsearch.transport.Transport; import org.elasticsearch.xpack.core.transform.action.GetCheckpointAction; import org.elasticsearch.xpack.core.transform.transforms.SourceConfig; import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpoint; @@ -358,13 +359,18 @@ public void testCreateNextCheckpointWithRemoteClient() throws InterruptedExcepti String transformId = getTestName(); TransformConfig transformConfig = TransformConfigTests.randomTransformConfig(transformId); + doAnswer(withMockConnection()).when(remoteClient1).getConnection(any(), any()); + doAnswer(withMockConnection()).when(remoteClient2).getConnection(any(), any());
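// [Editor's note -- illustrative, not part of the change] The remote-cluster client now resolves a
// Transport.Connection before executing, so every mocked remote client must stub getConnection(...) as above.
// withMockConnection() (defined at the end of this test class) completes the listener at argument index 1 with a
// mock Transport.Connection, and withRemoteResponse(...) completes the listener at argument index 3, matching what
// appears here to be a four-argument execute(connection, actionType, request, listener) call shape.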
+ doAnswer(withMockConnection()).when(remoteClient3).getConnection(any(), any()); + GetCheckpointAction.Response checkpointResponse = new GetCheckpointAction.Response(Map.of("index-1", new long[] { 1L, 2L, 3L })); doAnswer(withResponse(checkpointResponse)).when(client).execute(eq(GetCheckpointAction.INSTANCE), any(), any()); GetCheckpointAction.Response remoteCheckpointResponse = new GetCheckpointAction.Response( Map.of("index-1", new long[] { 4L, 5L, 6L, 7L, 8L }) ); - doAnswer(withResponse(remoteCheckpointResponse)).when(remoteClient1).execute(eq(GetCheckpointAction.REMOTE_TYPE), any(), any()); + doAnswer(withRemoteResponse(remoteCheckpointResponse)).when(remoteClient1) + .execute(any(), eq(GetCheckpointAction.REMOTE_TYPE), any(), any()); RemoteClusterResolver remoteClusterResolver = mock(RemoteClusterResolver.class); @@ -401,18 +407,25 @@ public void testCreateNextCheckpointWithRemoteClients() throws InterruptedExcept String transformId = getTestName(); TransformConfig transformConfig = TransformConfigTests.randomTransformConfig(transformId); + doAnswer(withMockConnection()).when(remoteClient1).getConnection(any(), any()); + doAnswer(withMockConnection()).when(remoteClient2).getConnection(any(), any()); + doAnswer(withMockConnection()).when(remoteClient3).getConnection(any(), any()); + GetCheckpointAction.Response remoteCheckpointResponse1 = new GetCheckpointAction.Response( Map.of("index-1", new long[] { 1L, 2L, 3L }) ); - doAnswer(withResponse(remoteCheckpointResponse1)).when(remoteClient1).execute(eq(GetCheckpointAction.REMOTE_TYPE), any(), any()); + doAnswer(withRemoteResponse(remoteCheckpointResponse1)).when(remoteClient1) + .execute(any(), eq(GetCheckpointAction.REMOTE_TYPE), any(), any()); GetCheckpointAction.Response remoteCheckpointResponse2 = new GetCheckpointAction.Response( Map.of("index-1", new long[] { 4L, 5L, 6L, 7L, 8L }) ); - doAnswer(withResponse(remoteCheckpointResponse2)).when(remoteClient2).execute(eq(GetCheckpointAction.REMOTE_TYPE), any(), any()); + doAnswer(withRemoteResponse(remoteCheckpointResponse2)).when(remoteClient2) + .execute(any(), eq(GetCheckpointAction.REMOTE_TYPE), any(), any()); GetCheckpointAction.Response remoteCheckpointResponse3 = new GetCheckpointAction.Response(Map.of("index-1", new long[] { 9L })); - doAnswer(withResponse(remoteCheckpointResponse3)).when(remoteClient3).execute(eq(GetCheckpointAction.REMOTE_TYPE), any(), any()); + doAnswer(withRemoteResponse(remoteCheckpointResponse3)).when(remoteClient3) + .execute(any(), eq(GetCheckpointAction.REMOTE_TYPE), any(), any()); RemoteClusterResolver remoteClusterResolver = mock(RemoteClusterResolver.class); @@ -483,4 +496,20 @@ private static Answer withResponse(Response response) { return null; }; } + + private static Answer withRemoteResponse(Response response) { + return invocationOnMock -> { + ActionListener listener = invocationOnMock.getArgument(3); + listener.onResponse(response); + return null; + }; + } + + private static Answer withMockConnection() { + return invocationOnMock -> { + ActionListener listener = invocationOnMock.getArgument(1); + listener.onResponse(mock(Transport.Connection.class)); + return null; + }; + } } diff --git a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java index 1e97e64371586..7784e7ffdda12 100644 --- a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java +++ 
b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java @@ -994,7 +994,7 @@ public FieldMapper.Builder getMergeBuilder() { protected SyntheticSourceSupport syntheticSourceSupport() { var layers = new ArrayList(); layers.add(new WildcardSyntheticFieldLoader()); - if (ignoreAbove != ignoreAboveDefault) { + if (ignoreAbove != Integer.MAX_VALUE) { layers.add(new CompositeSyntheticFieldLoader.StoredFieldLayer(originalName()) { @Override protected void writeValue(Object value, XContentBuilder b) throws IOException { diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MLModelDeploymentFullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MLModelDeploymentFullClusterRestartIT.java index 484e2ed3ac9c3..abc09a6563ee9 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MLModelDeploymentFullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MLModelDeploymentFullClusterRestartIT.java @@ -16,10 +16,12 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.Strings; import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.upgrades.FullClusterRestartUpgradeStatus; +import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.ml.inference.assignment.AllocationStatus; import org.junit.Before; @@ -195,14 +197,30 @@ private Response startDeployment(String modelId) throws IOException { } private Response startDeployment(String modelId, String waitForState) throws IOException { + String inferenceThreadParamName = "threads_per_allocation"; + String modelThreadParamName = "number_of_allocations"; + String compatibleHeader = null; + if (isRunningAgainstOldCluster()) { + compatibleHeader = compatibleMediaType(XContentType.VND_JSON, RestApiVersion.V_8); + inferenceThreadParamName = "inference_threads"; + modelThreadParamName = "model_threads"; + } + Request request = new Request( "POST", "/_ml/trained_models/" + modelId + "/deployment/_start?timeout=40s&wait_for=" + waitForState - + "&inference_threads=1&model_threads=1" + + "&" + + inferenceThreadParamName + + "=1&" + + modelThreadParamName + + "=1" ); + if (compatibleHeader != null) { + request.setOptions(request.getOptions().toBuilder().addHeader("Accept", compatibleHeader).build()); + } request.setOptions(request.getOptions().toBuilder().setWarningsHandler(PERMISSIVE).build()); var response = client().performRequest(request); assertOK(response); diff --git a/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java b/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java index c8c72855eaf7a..d61c143098fcb 100644 --- a/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java +++ b/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java @@ -370,14 +370,10 @@ private void assertDocs( Version oldVersion, int numberOfShards ) throws IOException { - RequestOptions v7RequestOptions = RequestOptions.DEFAULT.toBuilder() - 
.addHeader("Content-Type", "application/vnd.elasticsearch+json;compatible-with=7") - .addHeader("Accept", "application/vnd.elasticsearch+json;compatible-with=7") - .build(); - RequestOptions randomRequestOptions = randomBoolean() ? RequestOptions.DEFAULT : v7RequestOptions; + RequestOptions requestOptions = RequestOptions.DEFAULT; // run a search against the index - SearchResponse searchResponse = search(index, null, randomRequestOptions); + SearchResponse searchResponse = search(index, null, requestOptions); try { logger.info(searchResponse); // check hit count @@ -404,7 +400,7 @@ private void assertDocs( SearchSourceBuilder.searchSource() .query(QueryBuilders.matchQuery("val", num)) .runtimeMappings(Map.of("val", Map.of("type", "long"))), - randomRequestOptions + requestOptions ); try { logger.info(searchResponse); @@ -422,7 +418,7 @@ private void assertDocs( SearchSourceBuilder.searchSource() .query(QueryBuilders.matchAllQuery()) .sort(SortBuilders.fieldSort("val").order(SortOrder.DESC)), - randomRequestOptions + requestOptions ); try { logger.info(searchResponse); @@ -439,7 +435,7 @@ private void assertDocs( searchResponse = search( index, SearchSourceBuilder.searchSource().query(QueryBuilders.matchQuery("test", "test" + num)), - randomRequestOptions + requestOptions ); try { logger.info(searchResponse); @@ -456,7 +452,7 @@ private void assertDocs( searchResponse = search( index, SearchSourceBuilder.searchSource().query(QueryBuilders.termQuery("_type", randomType)), - randomRequestOptions + requestOptions ); try { logger.info(searchResponse); @@ -482,7 +478,7 @@ private void assertDocs( searchResponse = search( index, SearchSourceBuilder.searchSource().query(QueryBuilders.rangeQuery("create_date").from("2020-02-01")), - randomRequestOptions + requestOptions ); try { logger.info(searchResponse); diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MLModelDeploymentsUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MLModelDeploymentsUpgradeIT.java index d935672e0a243..553d5e7425de7 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MLModelDeploymentsUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MLModelDeploymentsUpgradeIT.java @@ -12,8 +12,10 @@ import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.Strings; import org.elasticsearch.core.UpdateForV9; +import org.elasticsearch.xcontent.XContentType; import org.junit.After; import org.junit.Before; import org.junit.BeforeClass; @@ -242,14 +244,30 @@ private Response startDeployment(String modelId) throws IOException { } private Response startDeployment(String modelId, String waitForState) throws IOException { + String inferenceThreadParamName = "threads_per_allocation"; + String modelThreadParamName = "number_of_allocations"; + String compatibleHeader = null; + if (CLUSTER_TYPE.equals(ClusterType.OLD) || CLUSTER_TYPE.equals(ClusterType.MIXED)) { + compatibleHeader = compatibleMediaType(XContentType.VND_JSON, RestApiVersion.V_8); + inferenceThreadParamName = "inference_threads"; + modelThreadParamName = "model_threads"; + } + Request request = new Request( "POST", "/_ml/trained_models/" + modelId + "/deployment/_start?timeout=40s&wait_for=" + waitForState - + "&inference_threads=1&model_threads=1" + + "&" + + inferenceThreadParamName + + 
"=1&" + + modelThreadParamName + + "=1" ); + if (compatibleHeader != null) { + request.setOptions(request.getOptions().toBuilder().addHeader("Accept", compatibleHeader).build()); + } request.setOptions(request.getOptions().toBuilder().setWarningsHandler(PERMISSIVE).build()); var response = client().performRequest(request); assertOK(response); diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlAssignmentPlannerUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlAssignmentPlannerUpgradeIT.java index 6d34ef5887629..88118f6c2727a 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlAssignmentPlannerUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlAssignmentPlannerUpgradeIT.java @@ -12,11 +12,13 @@ import org.elasticsearch.client.Response; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.Strings; import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; import org.elasticsearch.test.rest.RestTestLegacyFeatures; +import org.elasticsearch.xcontent.XContentType; import java.io.IOException; import java.util.ArrayList; @@ -278,14 +280,30 @@ private Response startDeployment(String modelId) throws IOException { } private Response startDeployment(String modelId, String waitForState) throws IOException { + String inferenceThreadParamName = "threads_per_allocation"; + String modelThreadParamName = "number_of_allocations"; + String compatibleHeader = null; + if (CLUSTER_TYPE.equals(ClusterType.OLD) || CLUSTER_TYPE.equals(ClusterType.MIXED)) { + compatibleHeader = compatibleMediaType(XContentType.VND_JSON, RestApiVersion.V_8); + inferenceThreadParamName = "inference_threads"; + modelThreadParamName = "model_threads"; + } + Request request = new Request( "POST", "/_ml/trained_models/" + modelId + "/deployment/_start?timeout=40s&wait_for=" + waitForState - + "&inference_threads=1&model_threads=1" + + "&" + + inferenceThreadParamName + + "=1&" + + modelThreadParamName + + "=1" ); + if (compatibleHeader != null) { + request.setOptions(request.getOptions().toBuilder().addHeader("Accept", compatibleHeader).build()); + } request.setOptions(request.getOptions().toBuilder().setWarningsHandler(PERMISSIVE).build()); var response = client().performRequest(request); assertOK(response);